401 files changed, 13991 insertions, 5696 deletions
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
index 392ed30c6..4fe86d5ec 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -2,7 +2,7 @@
 name: Feature request
 about: Suggest an idea for this project
 title: ''
-labels: improvement
+labels: feature
 assignees: ''
 
 ---
diff --git a/.github/ISSUE_TEMPLATE/missing_documentation.md b/.github/ISSUE_TEMPLATE/missing_documentation.md
new file mode 100644
index 000000000..fbabd868e
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/missing_documentation.md
@@ -0,0 +1,28 @@
+---
+name: Missing or incorrect documentation
+about: Help us improve the reference manual
+title: ''
+labels: documentation
+assignees: ''
+
+---
+
+## Problem
+
+<!-- describe your problem -->
+
+## Checklist
+
+<!-- make sure this issue is not redundant or obsolete -->
+
+- [ ] checked [latest Nix manual] \([source])
+- [ ] checked [open documentation issues and pull requests] for possible duplicates
+
+[latest Nix manual]: https://nixos.org/manual/nix/unstable/
+[source]: https://github.com/NixOS/nix/tree/master/doc/manual/src
+[open documentation issues and pull requests]: https://github.com/NixOS/nix/labels/documentation
+
+## Proposal
+
+<!-- propose a solution -->
+
diff --git a/.github/stale.yml b/.github/stale.yml
index fe24942f4..ee831135a 100644
--- a/.github/stale.yml
+++ b/.github/stale.yml
@@ -1,10 +1,9 @@
 # Configuration for probot-stale - https://github.com/probot/stale
 daysUntilStale: 180
-daysUntilClose: 365
+daysUntilClose: false
 exemptLabels:
   - "critical"
+  - "never-stale"
 staleLabel: "stale"
-markComment: |
-  I marked this as stale due to inactivity. → [More info](https://github.com/NixOS/nix/blob/master/.github/STALE-BOT.md)
-closeComment: |
-  I closed this issue due to inactivity. → [More info](https://github.com/NixOS/nix/blob/master/.github/STALE-BOT.md)
+markComment: false
+closeComment: false
diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml
index ec7ab4516..75be788ef 100644
--- a/.github/workflows/backport.yml
+++ b/.github/workflows/backport.yml
@@ -2,20 +2,26 @@ name: Backport
 on:
   pull_request_target:
     types: [closed, labeled]
+permissions:
+  contents: read
 jobs:
   backport:
     name: Backport Pull Request
+    permissions:
+      # for zeebe-io/backport-action
+      contents: write
+      pull-requests: write
     if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name))
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           ref: ${{ github.event.pull_request.head.sha }}
           # required to find all branches
           fetch-depth: 0
       - name: Create backport PRs
         # should be kept in sync with `version`
-        uses: zeebe-io/backport-action@v0.0.7
+        uses: zeebe-io/backport-action@v0.0.8
         with:
           # Config README: https://github.com/zeebe-io/backport-action#backport-action
           github_token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 09436b7e3..dafba6d85 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -4,54 +4,62 @@ on:
   pull_request:
   push:
 
+permissions: read-all
+
 jobs:
   tests:
-    needs: [check_cachix]
+    needs: [check_secrets]
     strategy:
       matrix:
         os: [ubuntu-latest, macos-latest]
     runs-on: ${{ matrix.os }}
     timeout-minutes: 60
     steps:
-    - uses: actions/checkout@v2.4.0
+    - uses: actions/checkout@v3
      with:
        fetch-depth: 0
-    - uses: cachix/install-nix-action@v16
+    - uses: cachix/install-nix-action@v18
    - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/"
"[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@v16 - - uses: cachix/cachix-action@v10 + - uses: cachix/install-nix-action@v18 + - uses: cachix/cachix-action@v12 with: name: '${{ env.CACHIX_NAME }}' signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}' @@ -60,37 +68,45 @@ jobs: run: scripts/prepare-installer-for-github-actions installer_test: - needs: [installer, check_cachix] - if: github.event_name == 'push' && needs.check_cachix.outputs.secret == 'true' + needs: [installer, check_secrets] + if: github.event_name == 'push' && needs.check_secrets.outputs.cachix == 'true' strategy: matrix: os: [ubuntu-latest, macos-latest] runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v2.4.0 + - uses: actions/checkout@v3 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@v16 + - uses: cachix/install-nix-action@v18 with: install_url: '${{needs.installer.outputs.installerURL}}' install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve" - - run: nix-instantiate -E 'builtins.currentTime' --eval + - run: sudo apt install fish zsh + if: matrix.os == 'ubuntu-latest' + - run: brew install fish + if: matrix.os == 'macos-latest' + - run: exec bash -c "nix-instantiate -E 'builtins.currentTime' --eval" + - run: exec sh -c "nix-instantiate -E 'builtins.currentTime' --eval" + - run: exec zsh -c "nix-instantiate -E 'builtins.currentTime' --eval" + - run: exec fish -c "nix-instantiate -E 'builtins.currentTime' --eval" docker_push_image: - needs: [check_cachix, tests] + needs: [check_secrets, tests] if: >- github.event_name == 'push' && github.ref_name == 'master' && - needs.check_cachix.outputs.secret == 'true' + needs.check_secrets.outputs.cachix == 'true' && + needs.check_secrets.outputs.docker == 'true' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.4.0 + - uses: actions/checkout@v3 with: fetch-depth: 0 - - uses: cachix/install-nix-action@v16 + - uses: cachix/install-nix-action@v18 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - run: echo NIX_VERSION="$(nix-instantiate --eval -E '(import ./default.nix).defaultPackage.${builtins.currentSystem}.version' | tr -d \")" >> $GITHUB_ENV - - uses: cachix/cachix-action@v10 - if: needs.check_cachix.outputs.secret == 'true' + - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV + - uses: cachix/cachix-action@v12 + if: needs.check_secrets.outputs.cachix == 'true' with: name: '${{ env.CACHIX_NAME }}' signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}' @@ -100,7 +116,7 @@ jobs: - run: docker tag nix:$NIX_VERSION nixos/nix:$NIX_VERSION - run: docker tag nix:$NIX_VERSION nixos/nix:master - name: Login to Docker Hub - uses: docker/login-action@v1 + uses: docker/login-action@v2 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/hydra_status.yml b/.github/workflows/hydra_status.yml index b97076bd7..38a9c0877 100644 --- a/.github/workflows/hydra_status.yml +++ b/.github/workflows/hydra_status.yml @@ -1,15 +1,19 @@ name: Hydra status + +permissions: read-all + on: schedule: - cron: "12,42 * * * *" workflow_dispatch: + jobs: check_hydra_status: name: Check Hydra status if: github.repository_owner == 'NixOS' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.4.0 + - uses: actions/checkout@v3 with: fetch-depth: 0 - run: bash 
scripts/check-hydra-status.sh
diff --git a/.gitignore b/.gitignore
index 4b290425a..8e0db013f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,11 +22,13 @@ perl/Makefile.config
 /doc/manual/src/SUMMARY.md
 /doc/manual/src/command-ref/new-cli
 /doc/manual/src/command-ref/conf-file.md
-/doc/manual/src/expressions/builtins.md
+/doc/manual/src/language/builtins.md
 
 # /scripts/
 /scripts/nix-profile.sh
 /scripts/nix-profile-daemon.sh
+/scripts/nix-profile.fish
+/scripts/nix-profile-daemon.fish
 
 # /src/libexpr/
 /src/libexpr/lexer-tab.cc
@@ -35,6 +37,7 @@ perl/Makefile.config
 /src/libexpr/parser-tab.hh
 /src/libexpr/parser-tab.output
 /src/libexpr/nix.tbl
+/src/libexpr/tests/libexpr-tests
 
 # /src/libstore/
 *.gen.*
@@ -79,6 +82,7 @@ perl/Makefile.config
 /tests/shell.drv
 /tests/config.nix
 /tests/ca/config.nix
+/tests/repl-result-out
 
 # /tests/lang/
 /tests/lang/*.out
@@ -90,6 +94,7 @@ perl/Makefile.config
 /misc/systemd/nix-daemon.service
 /misc/systemd/nix-daemon.socket
+/misc/systemd/nix-daemon.conf
 /misc/upstart/nix-daemon.conf
 
 /src/resolve-system-dependencies/resolve-system-dependencies
 
@@ -1 +1 @@
-2.7.0
\ No newline at end of file
+2.12.0
\ No newline at end of file @@ -8,6 +8,7 @@ makefiles = \ src/libfetchers/local.mk \ src/libmain/local.mk \ src/libexpr/local.mk \ + src/libexpr/tests/local.mk \ src/libcmd/local.mk \ src/nix/local.mk \ src/resolve-system-dependencies/local.mk \ @@ -27,7 +28,8 @@ makefiles = \ OPTIMIZE = 1 ifeq ($(OPTIMIZE), 1) - GLOBAL_CXXFLAGS += -O3 + GLOBAL_CXXFLAGS += -O3 $(CXXLTO) + GLOBAL_LDFLAGS += $(CXXLTO) else GLOBAL_CXXFLAGS += -O0 -U_FORTIFY_SOURCE endif diff --git a/Makefile.config.in b/Makefile.config.in index 3505f337e..1c5405c6d 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -1,4 +1,3 @@ -HOST_OS = @host_os@ AR = @AR@ BDW_GC_LIBS = @BDW_GC_LIBS@ BOOST_LDFLAGS = @BOOST_LDFLAGS@ @@ -7,18 +6,20 @@ CC = @CC@ CFLAGS = @CFLAGS@ CXX = @CXX@ CXXFLAGS = @CXXFLAGS@ +CXXLTO = @CXXLTO@ EDITLINE_LIBS = @EDITLINE_LIBS@ ENABLE_S3 = @ENABLE_S3@ GTEST_LIBS = @GTEST_LIBS@ HAVE_LIBCPUID = @HAVE_LIBCPUID@ HAVE_SECCOMP = @HAVE_SECCOMP@ +HOST_OS = @host_os@ LDFLAGS = @LDFLAGS@ LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@ LIBBROTLI_LIBS = @LIBBROTLI_LIBS@ LIBCURL_LIBS = @LIBCURL_LIBS@ +LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@ LOWDOWN_LIBS = @LOWDOWN_LIBS@ OPENSSL_LIBS = @OPENSSL_LIBS@ -LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_VERSION = @PACKAGE_VERSION@ SHELL = @bash@ @@ -30,6 +31,7 @@ datadir = @datadir@ datarootdir = @datarootdir@ doc_generate = @doc_generate@ docdir = @docdir@ +embedded_sandbox_shell = @embedded_sandbox_shell@ exec_prefix = @exec_prefix@ includedir = @includedir@ libdir = @libdir@ @@ -20,7 +20,7 @@ Information on additional installation methods is available on the [Nix download ## Building And Developing -See our [Hacking guide](https://hydra.nixos.org/job/nix/master/build.x86_64-linux/latest/download-by-type/doc/manual/contributing/hacking.html) in our manual for instruction on how to +See our [Hacking guide](https://nixos.org/manual/nix/stable/contributing/hacking.html) in our manual for instruction on how to build nix from source with nix-build or how to get a development environment. ## Additional Resources diff --git a/boehmgc-coroutine-sp-fallback.diff b/boehmgc-coroutine-sp-fallback.diff index e659bf470..8fdafbecb 100644 --- a/boehmgc-coroutine-sp-fallback.diff +++ b/boehmgc-coroutine-sp-fallback.diff @@ -1,3 +1,35 @@ +diff --git a/darwin_stop_world.c b/darwin_stop_world.c +index 3dbaa3fb..36a1d1f7 100644 +--- a/darwin_stop_world.c ++++ b/darwin_stop_world.c +@@ -352,6 +352,7 @@ GC_INNER void GC_push_all_stacks(void) + int nthreads = 0; + word total_size = 0; + mach_msg_type_number_t listcount = (mach_msg_type_number_t)THREAD_TABLE_SZ; ++ size_t stack_limit; + if (!EXPECT(GC_thr_initialized, TRUE)) + GC_thr_init(); + +@@ -407,6 +408,19 @@ GC_INNER void GC_push_all_stacks(void) + GC_push_all_stack_sections(lo, hi, p->traced_stack_sect); + } + if (altstack_lo) { ++ // When a thread goes into a coroutine, we lose its original sp until ++ // control flow returns to the thread. ++ // While in the coroutine, the sp points outside the thread stack, ++ // so we can detect this and push the entire thread stack instead, ++ // as an approximation. ++ // We assume that the coroutine has similarly added its entire stack. ++ // This could be made accurate by cooperating with the application ++ // via new functions and/or callbacks. 
++ stack_limit = pthread_get_stacksize_np(p->id); ++ if (altstack_lo >= altstack_hi || altstack_lo < altstack_hi - stack_limit) { // sp outside stack ++ altstack_lo = altstack_hi - stack_limit; ++ } ++ + total_size += altstack_hi - altstack_lo; + GC_push_all_stack(altstack_lo, altstack_hi); + } diff --git a/pthread_stop_world.c b/pthread_stop_world.c index 4b2c429..1fb4c52 100644 --- a/pthread_stop_world.c diff --git a/configure.ac b/configure.ac index 8a01c33ec..64fa12fc7 100644 --- a/configure.ac +++ b/configure.ac @@ -147,6 +147,20 @@ if test "x$GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC" = xyes; then LDFLAGS="-latomic $LDFLAGS" fi +# LTO is currently broken with clang for unknown reasons; ld segfaults in the llvm plugin +AC_ARG_ENABLE(lto, AS_HELP_STRING([--enable-lto],[Enable LTO (only supported with GCC) [default=no]]), + lto=$enableval, lto=no) +if test "$lto" = yes; then + if $CXX --version | grep -q GCC; then + AC_SUBST(CXXLTO, [-flto=jobserver]) + else + echo "error: LTO is only supported with GCC at the moment" >&2 + exit 1 + fi +else + AC_SUBST(CXXLTO, [""]) +fi + PKG_PROG_PKG_CONFIG AC_ARG_ENABLE(shared, AS_HELP_STRING([--enable-shared],[Build shared libraries for Nix [default=yes]]), @@ -282,18 +296,28 @@ AC_CHECK_FUNCS([setresuid setreuid lchown]) AC_CHECK_FUNCS([strsignal posix_fallocate sysconf]) -# This is needed if bzip2 is a static library, and the Nix libraries -# are dynamic. -case "${host_os}" in - darwin*) - LDFLAGS="-all_load $LDFLAGS" - ;; -esac - - AC_ARG_WITH(sandbox-shell, AS_HELP_STRING([--with-sandbox-shell=PATH],[path of a statically-linked shell to use as /bin/sh in sandboxes]), sandbox_shell=$withval) AC_SUBST(sandbox_shell) +if test ${cross_compiling:-no} = no && ! test -z ${sandbox_shell+x}; then + AC_MSG_CHECKING([whether sandbox-shell has the standalone feature]) + # busybox shell sometimes allows executing other busybox applets, + # even if they are not in the path, breaking our sandbox + if PATH= $sandbox_shell -c "busybox" 2>&1 | grep -qv "not found"; then + AC_MSG_RESULT(enabled) + AC_MSG_ERROR([Please disable busybox FEATURE_SH_STANDALONE]) + else + AC_MSG_RESULT(disabled) + fi +fi + +AC_ARG_ENABLE(embedded-sandbox-shell, AS_HELP_STRING([--enable-embedded-sandbox-shell],[include the sandbox shell in the Nix binary [default=no]]), + embedded_sandbox_shell=$enableval, embedded_sandbox_shell=no) +AC_SUBST(embedded_sandbox_shell) +if test "$embedded_sandbox_shell" = yes; then + AC_DEFINE(HAVE_EMBEDDED_SANDBOX_SHELL, 1, [Include the sandbox shell in the Nix binary.]) +fi + # Expand all variables in config.status. test "$prefix" = NONE && prefix=$ac_default_prefix diff --git a/doc/manual/anchors.jq b/doc/manual/anchors.jq new file mode 100755 index 000000000..72309779c --- /dev/null +++ b/doc/manual/anchors.jq @@ -0,0 +1,31 @@ +"\\[\\]\\{#(?<anchor>[^\\}]+?)\\}" as $empty_anchor_regex | +"\\[(?<text>[^\\]]+?)\\]\\{#(?<anchor>[^\\}]+?)\\}" as $anchor_regex | + + +def transform_anchors_html: + . | gsub($empty_anchor_regex; "<a name=\"" + .anchor + "\"></a>") + | gsub($anchor_regex; "<a href=\"#" + .anchor + "\" id=\"" + .anchor + "\">" + .text + "</a>"); + + +def transform_anchors_strip: + . | gsub($empty_anchor_regex; "") + | gsub($anchor_regex; .text); + + +def map_contents_recursively(transformer): + . 
+ { + Chapter: (.Chapter + { + content: .Chapter.content | transformer, + sub_items: .Chapter.sub_items | map(map_contents_recursively(transformer)), + }), + }; + + +def process_command: + .[0] as $context | + .[1] as $body | + $body + { + sections: $body.sections | map(map_contents_recursively(if $context.renderer == "html" then transform_anchors_html else transform_anchors_strip end)), + }; + +process_command diff --git a/doc/manual/book.toml b/doc/manual/book.toml index fee41dfb3..5f78a7614 100644 --- a/doc/manual/book.toml +++ b/doc/manual/book.toml @@ -1,2 +1,7 @@ [output.html] additional-css = ["custom.css"] +additional-js = ["redirects.js"] + +[preprocessor.anchors] +renderers = ["html"] +command = "jq --from-file doc/manual/anchors.jq" diff --git a/doc/manual/generate-manpage.nix b/doc/manual/generate-manpage.nix index 244cfa0c2..057719e34 100644 --- a/doc/manual/generate-manpage.nix +++ b/doc/manual/generate-manpage.nix @@ -1,99 +1,114 @@ -{ command, renderLinks ? false }: +{ toplevel }: with builtins; with import ./utils.nix; let - showCommand = - { command, def, filename }: - '' - **Warning**: This program is **experimental** and its interface is subject to change. - '' - + "# Name\n\n" - + "`${command}` - ${def.description}\n\n" - + "# Synopsis\n\n" - + showSynopsis { inherit command; args = def.args; } - + (if def.commands or {} != {} - then - let - categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues def.commands))); - listCommands = cmds: - concatStrings (map (name: - "* " - + (if renderLinks - then "[`${command} ${name}`](./${appendName filename name}.md)" - else "`${command} ${name}`") - + " - ${cmds.${name}.description}\n") - (attrNames cmds)); - in - "where *subcommand* is one of the following:\n\n" - # FIXME: group by category - + (if length categories > 1 - then - concatStrings (map - (cat: - "**${toString cat.description}:**\n\n" - + listCommands (filterAttrs (n: v: v.category == cat) def.commands) - + "\n" - ) categories) - + "\n" - else - listCommands def.commands - + "\n") - else "") - + (if def ? doc - then def.doc + "\n\n" - else "") - + (let s = showOptions def.flags; in - if s != "" - then "# Options\n\n${s}" - else "") - ; + showCommand = { command, details, filename, toplevel }: + let + result = '' + > **Warning** \ + > This program is **experimental** and its interface is subject to change. + + # Name + + `${command}` - ${details.description} + + # Synopsis + + ${showSynopsis command details.args} + + ${maybeSubcommands} + + ${maybeDocumentation} + + ${maybeOptions} + ''; + showSynopsis = command: args: + let + showArgument = arg: "*${arg.label}*" + (if arg ? arity then "" else "..."); + arguments = concatStringsSep " " (map showArgument args); + in '' + `${command}` [*option*...] ${arguments} + ''; + maybeSubcommands = if details ? 
commands && details.commands != {} + then '' + where *subcommand* is one of the following: + + ${subcommands} + '' + else ""; + subcommands = if length categories > 1 + then listCategories + else listSubcommands details.commands; + categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues details.commands))); + listCategories = concatStrings (map showCategory categories); + showCategory = cat: '' + **${toString cat.description}:** + + ${listSubcommands (filterAttrs (n: v: v.category == cat) details.commands)} + ''; + listSubcommands = cmds: concatStrings (attrValues (mapAttrs showSubcommand cmds)); + showSubcommand = name: subcmd: '' + * [`${command} ${name}`](./${appendName filename name}.md) - ${subcmd.description} + ''; + maybeDocumentation = if details ? doc then details.doc else ""; + maybeOptions = if details.flags == {} then "" else '' + # Options + + ${showOptions details.flags toplevel.flags} + ''; + showOptions = options: commonOptions: + let + allOptions = options // commonOptions; + showCategory = cat: '' + ${if cat != "" then "**${cat}:**" else ""} + + ${listOptions (filterAttrs (n: v: v.category == cat) allOptions)} + ''; + listOptions = opts: concatStringsSep "\n" (attrValues (mapAttrs showOption opts)); + showOption = name: option: + let + shortName = if option ? shortName then "/ `-${option.shortName}`" else ""; + labels = if option ? labels then (concatStringsSep " " (map (s: "*${s}*") option.labels)) else ""; + in trim '' + - `--${name}` ${shortName} ${labels} + + ${option.description} + ''; + categories = sort builtins.lessThan (unique (map (cmd: cmd.category) (attrValues allOptions))); + in concatStrings (map showCategory categories); + in squash result; appendName = filename: name: (if filename == "nix" then "nix3" else filename) + "-" + name; - showOptions = flags: + processCommand = { command, details, filename, toplevel }: let - categories = sort builtins.lessThan (unique (map (cmd: cmd.category) (attrValues flags))); - in - concatStrings (map - (cat: - (if cat != "" - then "**${cat}:**\n\n" - else "") - + concatStrings - (map (longName: - let - flag = flags.${longName}; - in - " - `--${longName}`" - + (if flag ? shortName then " / `-${flag.shortName}`" else "") - + (if flag ? labels then " " + (concatStringsSep " " (map (s: "*${s}*") flag.labels)) else "") - + " \n" - + " " + flag.description + "\n\n" - ) (attrNames (filterAttrs (n: v: v.category == cat) flags)))) - categories); - - showSynopsis = - { command, args }: - "`${command}` [*option*...] ${concatStringsSep " " - (map (arg: "*${arg.label}*" + (if arg ? 
arity then "" else "...")) args)}\n\n"; - - processCommand = { command, def, filename }: - [ { name = filename + ".md"; value = showCommand { inherit command def filename; }; inherit command; } ] - ++ concatMap - (name: processCommand { - filename = appendName filename name; - command = command + " " + name; - def = def.commands.${name}; - }) - (attrNames def.commands or {}); - -in + cmd = { + inherit command; + name = filename + ".md"; + value = showCommand { inherit command details filename toplevel; }; + }; + subcommand = subCmd: processCommand { + command = command + " " + subCmd; + details = details.commands.${subCmd}; + filename = appendName filename subCmd; + inherit toplevel; + }; + in [ cmd ] ++ concatMap subcommand (attrNames details.commands or {}); -let - manpages = processCommand { filename = "nix"; command = "nix"; def = builtins.fromJSON command; }; - summary = concatStrings (map (manpage: " - [${manpage.command}](command-ref/new-cli/${manpage.name})\n") manpages); -in -(listToAttrs manpages) // { "SUMMARY.md" = summary; } + parsedToplevel = builtins.fromJSON toplevel; + manpages = processCommand { + command = "nix"; + details = parsedToplevel; + filename = "nix"; + toplevel = parsedToplevel; + }; + + tableOfContents = let + showEntry = page: + " - [${page.command}](command-ref/new-cli/${page.name})"; + in concatStringsSep "\n" (map showEntry manpages) + "\n"; + +in (listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; } diff --git a/doc/manual/generate-options.nix b/doc/manual/generate-options.nix index 84d90beb6..680b709c8 100644 --- a/doc/manual/generate-options.nix +++ b/doc/manual/generate-options.nix @@ -6,20 +6,21 @@ options: concatStrings (map (name: let option = options.${name}; in - " - `${name}` \n\n" + " - [`${name}`](#conf-${name})" + + "<p id=\"conf-${name}\"></p>\n\n" + concatStrings (map (s: " ${s}\n") (splitLines option.description)) + "\n\n" + (if option.documentDefault then " **Default:** " + ( - if option.value == "" || option.value == [] + if option.defaultValue == "" || option.defaultValue == [] then "*empty*" - else if isBool option.value - then (if option.value then "`true`" else "`false`") + else if isBool option.defaultValue + then (if option.defaultValue then "`true`" else "`false`") else # n.b. a StringMap value type is specified as a string, but # this shows the value type. The empty stringmap is "null" in # JSON, but that converts to "{ }" here. - (if isAttrs option.value then "`\"\"`" - else "`" + toString option.value + "`")) + "\n\n" + (if isAttrs option.defaultValue then "`\"\"`" + else "`" + toString option.defaultValue + "`")) + "\n\n" else " **Default:** *machine-specific*\n") + (if option.aliases != [] then " **Deprecated alias:** " + (concatStringsSep ", " (map (s: "`${s}`") option.aliases)) + "\n\n" diff --git a/doc/manual/local.mk b/doc/manual/local.mk index c1ce8aaeb..486dbd7a2 100644 --- a/doc/manual/local.mk +++ b/doc/manual/local.mk @@ -1,5 +1,9 @@ ifeq ($(doc_generate),yes) +MANUAL_SRCS := \ + $(call rwildcard, $(d)/src, *.md) \ + $(call rwildcard, $(d)/src, */*.md) + # Generate man pages. 
man-pages := $(foreach n, \ nix-env.1 nix-build.1 nix-shell.1 nix-store.1 nix-instantiate.1 \ @@ -46,7 +50,7 @@ $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli $(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/generate-manpage.nix $(bindir)/nix @rm -rf $@ - $(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { command = builtins.readFile $<; renderLinks = true; }' + $(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { toplevel = builtins.readFile $<; }' $(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/generate-options.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix @cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp @@ -61,10 +65,10 @@ $(d)/conf-file.json: $(bindir)/nix $(trace-gen) $(dummy-env) $(bindir)/nix show-config --json --experimental-features nix-command > $@.tmp @mv $@.tmp $@ -$(d)/src/expressions/builtins.md: $(d)/builtins.json $(d)/generate-builtins.nix $(d)/src/expressions/builtins-prefix.md $(bindir)/nix - @cat doc/manual/src/expressions/builtins-prefix.md > $@.tmp +$(d)/src/language/builtins.md: $(d)/builtins.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(bindir)/nix + @cat doc/manual/src/language/builtins-prefix.md > $@.tmp $(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp - @cat doc/manual/src/expressions/builtins-suffix.md >> $@.tmp + @cat doc/manual/src/language/builtins-suffix.md >> $@.tmp @mv $@.tmp $@ $(d)/builtins.json: $(bindir)/nix @@ -92,12 +96,12 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli if [[ $$name = SUMMARY ]]; then continue; fi; \ printf "Title: %s\n\n" "$$name" > $$tmpFile; \ cat $$i >> $$tmpFile; \ - lowdown -sT man -M section=1 $$tmpFile -o $(DESTDIR)$$(dirname $@)/$$name.1; \ + lowdown -sT man --nroff-nolinks -M section=1 $$tmpFile -o $(DESTDIR)$$(dirname $@)/$$name.1; \ rm $$tmpFile; \ done @touch $@ -$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/expressions/builtins.md $(call rwildcard, $(d)/src, *.md) +$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md $(trace-gen) RUST_LOG=warn mdbook build doc/manual -d $(DESTDIR)$(docdir)/manual endif diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js new file mode 100644 index 000000000..167e221b8 --- /dev/null +++ b/doc/manual/redirects.js @@ -0,0 +1,330 @@ +// Redirects from old DocBook manual. 
+var redirects = { + "#part-advanced-topics": "advanced-topics/advanced-topics.html", + "#chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html", + "#chap-diff-hook": "advanced-topics/diff-hook.html", + "#check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered", + "#chap-distributed-builds": "advanced-topics/distributed-builds.html", + "#chap-post-build-hook": "advanced-topics/post-build-hook.html", + "#chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats", + "#part-command-ref": "command-ref/command-ref.html", + "#conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation", + "#conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges", + "#conf-allowed-uris": "command-ref/conf-file.html#conf-allowed-uris", + "#conf-allowed-users": "command-ref/conf-file.html#conf-allowed-users", + "#conf-auto-optimise-store": "command-ref/conf-file.html#conf-auto-optimise-store", + "#conf-binary-cache-public-keys": "command-ref/conf-file.html#conf-binary-cache-public-keys", + "#conf-binary-caches": "command-ref/conf-file.html#conf-binary-caches", + "#conf-build-compress-log": "command-ref/conf-file.html#conf-build-compress-log", + "#conf-build-cores": "command-ref/conf-file.html#conf-build-cores", + "#conf-build-extra-chroot-dirs": "command-ref/conf-file.html#conf-build-extra-chroot-dirs", + "#conf-build-extra-sandbox-paths": "command-ref/conf-file.html#conf-build-extra-sandbox-paths", + "#conf-build-fallback": "command-ref/conf-file.html#conf-build-fallback", + "#conf-build-max-jobs": "command-ref/conf-file.html#conf-build-max-jobs", + "#conf-build-max-log-size": "command-ref/conf-file.html#conf-build-max-log-size", + "#conf-build-max-silent-time": "command-ref/conf-file.html#conf-build-max-silent-time", + "#conf-build-repeat": "command-ref/conf-file.html#conf-build-repeat", + "#conf-build-timeout": "command-ref/conf-file.html#conf-build-timeout", + "#conf-build-use-chroot": "command-ref/conf-file.html#conf-build-use-chroot", + "#conf-build-use-sandbox": "command-ref/conf-file.html#conf-build-use-sandbox", + "#conf-build-use-substitutes": "command-ref/conf-file.html#conf-build-use-substitutes", + "#conf-build-users-group": "command-ref/conf-file.html#conf-build-users-group", + "#conf-builders": "command-ref/conf-file.html#conf-builders", + "#conf-builders-use-substitutes": "command-ref/conf-file.html#conf-builders-use-substitutes", + "#conf-compress-build-log": "command-ref/conf-file.html#conf-compress-build-log", + "#conf-connect-timeout": "command-ref/conf-file.html#conf-connect-timeout", + "#conf-cores": "command-ref/conf-file.html#conf-cores", + "#conf-diff-hook": "command-ref/conf-file.html#conf-diff-hook", + "#conf-enforce-determinism": "command-ref/conf-file.html#conf-enforce-determinism", + "#conf-env-keep-derivations": "command-ref/conf-file.html#conf-env-keep-derivations", + "#conf-extra-binary-caches": "command-ref/conf-file.html#conf-extra-binary-caches", + "#conf-extra-platforms": "command-ref/conf-file.html#conf-extra-platforms", + "#conf-extra-sandbox-paths": "command-ref/conf-file.html#conf-extra-sandbox-paths", + "#conf-extra-substituters": "command-ref/conf-file.html#conf-extra-substituters", + "#conf-fallback": "command-ref/conf-file.html#conf-fallback", + "#conf-fsync-metadata": "command-ref/conf-file.html#conf-fsync-metadata", + "#conf-gc-keep-derivations": "command-ref/conf-file.html#conf-gc-keep-derivations", + "#conf-gc-keep-outputs": 
"command-ref/conf-file.html#conf-gc-keep-outputs", + "#conf-hashed-mirrors": "command-ref/conf-file.html#conf-hashed-mirrors", + "#conf-http-connections": "command-ref/conf-file.html#conf-http-connections", + "#conf-keep-build-log": "command-ref/conf-file.html#conf-keep-build-log", + "#conf-keep-derivations": "command-ref/conf-file.html#conf-keep-derivations", + "#conf-keep-env-derivations": "command-ref/conf-file.html#conf-keep-env-derivations", + "#conf-keep-outputs": "command-ref/conf-file.html#conf-keep-outputs", + "#conf-max-build-log-size": "command-ref/conf-file.html#conf-max-build-log-size", + "#conf-max-free": "command-ref/conf-file.html#conf-max-free", + "#conf-max-jobs": "command-ref/conf-file.html#conf-max-jobs", + "#conf-max-silent-time": "command-ref/conf-file.html#conf-max-silent-time", + "#conf-min-free": "command-ref/conf-file.html#conf-min-free", + "#conf-narinfo-cache-negative-ttl": "command-ref/conf-file.html#conf-narinfo-cache-negative-ttl", + "#conf-narinfo-cache-positive-ttl": "command-ref/conf-file.html#conf-narinfo-cache-positive-ttl", + "#conf-netrc-file": "command-ref/conf-file.html#conf-netrc-file", + "#conf-plugin-files": "command-ref/conf-file.html#conf-plugin-files", + "#conf-post-build-hook": "command-ref/conf-file.html#conf-post-build-hook", + "#conf-pre-build-hook": "command-ref/conf-file.html#conf-pre-build-hook", + "#conf-repeat": "command-ref/conf-file.html#conf-repeat", + "#conf-require-sigs": "command-ref/conf-file.html#conf-require-sigs", + "#conf-restrict-eval": "command-ref/conf-file.html#conf-restrict-eval", + "#conf-run-diff-hook": "command-ref/conf-file.html#conf-run-diff-hook", + "#conf-sandbox": "command-ref/conf-file.html#conf-sandbox", + "#conf-sandbox-dev-shm-size": "command-ref/conf-file.html#conf-sandbox-dev-shm-size", + "#conf-sandbox-paths": "command-ref/conf-file.html#conf-sandbox-paths", + "#conf-secret-key-files": "command-ref/conf-file.html#conf-secret-key-files", + "#conf-show-trace": "command-ref/conf-file.html#conf-show-trace", + "#conf-stalled-download-timeout": "command-ref/conf-file.html#conf-stalled-download-timeout", + "#conf-substitute": "command-ref/conf-file.html#conf-substitute", + "#conf-substituters": "command-ref/conf-file.html#conf-substituters", + "#conf-system": "command-ref/conf-file.html#conf-system", + "#conf-system-features": "command-ref/conf-file.html#conf-system-features", + "#conf-tarball-ttl": "command-ref/conf-file.html#conf-tarball-ttl", + "#conf-timeout": "command-ref/conf-file.html#conf-timeout", + "#conf-trace-function-calls": "command-ref/conf-file.html#conf-trace-function-calls", + "#conf-trusted-binary-caches": "command-ref/conf-file.html#conf-trusted-binary-caches", + "#conf-trusted-public-keys": "command-ref/conf-file.html#conf-trusted-public-keys", + "#conf-trusted-substituters": "command-ref/conf-file.html#conf-trusted-substituters", + "#conf-trusted-users": "command-ref/conf-file.html#conf-trusted-users", + "#extra-sandbox-paths": "command-ref/conf-file.html#extra-sandbox-paths", + "#sec-conf-file": "command-ref/conf-file.html", + "#env-NIX_PATH": "command-ref/env-common.html#env-NIX_PATH", + "#env-common": "command-ref/env-common.html", + "#envar-remote": "command-ref/env-common.html#env-NIX_REMOTE", + "#sec-common-env": "command-ref/env-common.html", + "#ch-files": "command-ref/files.html", + "#ch-main-commands": "command-ref/main-commands.html", + "#opt-out-link": "command-ref/nix-build.html#opt-out-link", + "#sec-nix-build": "command-ref/nix-build.html", + "#sec-nix-channel": 
"command-ref/nix-channel.html", + "#sec-nix-collect-garbage": "command-ref/nix-collect-garbage.html", + "#sec-nix-copy-closure": "command-ref/nix-copy-closure.html", + "#sec-nix-daemon": "command-ref/nix-daemon.html", + "#refsec-nix-env-install-examples": "command-ref/nix-env.html#examples", + "#rsec-nix-env-install": "command-ref/nix-env.html#operation---install", + "#rsec-nix-env-set": "command-ref/nix-env.html#operation---set", + "#rsec-nix-env-set-flag": "command-ref/nix-env.html#operation---set-flag", + "#rsec-nix-env-upgrade": "command-ref/nix-env.html#operation---upgrade", + "#sec-nix-env": "command-ref/nix-env.html", + "#ssec-version-comparisons": "command-ref/nix-env.html#versions", + "#sec-nix-hash": "command-ref/nix-hash.html", + "#sec-nix-instantiate": "command-ref/nix-instantiate.html", + "#sec-nix-prefetch-url": "command-ref/nix-prefetch-url.html", + "#sec-nix-shell": "command-ref/nix-shell.html", + "#ssec-nix-shell-shebang": "command-ref/nix-shell.html#use-as-a--interpreter", + "#nixref-queries": "command-ref/nix-store.html#queries", + "#opt-add-root": "command-ref/nix-store.html#opt-add-root", + "#refsec-nix-store-dump": "command-ref/nix-store.html#operation---dump", + "#refsec-nix-store-export": "command-ref/nix-store.html#operation---export", + "#refsec-nix-store-import": "command-ref/nix-store.html#operation---import", + "#refsec-nix-store-query": "command-ref/nix-store.html#operation---query", + "#refsec-nix-store-verify": "command-ref/nix-store.html#operation---verify", + "#rsec-nix-store-gc": "command-ref/nix-store.html#operation---gc", + "#rsec-nix-store-generate-binary-cache-key": "command-ref/nix-store.html#operation---generate-binary-cache-key", + "#rsec-nix-store-realise": "command-ref/nix-store.html#operation---realise", + "#rsec-nix-store-serve": "command-ref/nix-store.html#operation---serve", + "#sec-nix-store": "command-ref/nix-store.html", + "#opt-I": "command-ref/opt-common.html#opt-I", + "#opt-attr": "command-ref/opt-common.html#opt-attr", + "#opt-common": "command-ref/opt-common.html", + "#opt-cores": "command-ref/opt-common.html#opt-cores", + "#opt-log-format": "command-ref/opt-common.html#opt-log-format", + "#opt-max-jobs": "command-ref/opt-common.html#opt-max-jobs", + "#opt-max-silent-time": "command-ref/opt-common.html#opt-max-silent-time", + "#opt-timeout": "command-ref/opt-common.html#opt-timeout", + "#sec-common-options": "command-ref/opt-common.html", + "#ch-utilities": "command-ref/utilities.html", + "#chap-hacking": "contributing/hacking.html", + "#adv-attr-allowSubstitutes": "language/advanced-attributes.html#adv-attr-allowSubstitutes", + "#adv-attr-allowedReferences": "language/advanced-attributes.html#adv-attr-allowedReferences", + "#adv-attr-allowedRequisites": "language/advanced-attributes.html#adv-attr-allowedRequisites", + "#adv-attr-disallowedReferences": "language/advanced-attributes.html#adv-attr-disallowedReferences", + "#adv-attr-disallowedRequisites": "language/advanced-attributes.html#adv-attr-disallowedRequisites", + "#adv-attr-exportReferencesGraph": "language/advanced-attributes.html#adv-attr-exportReferencesGraph", + "#adv-attr-impureEnvVars": "language/advanced-attributes.html#adv-attr-impureEnvVars", + "#adv-attr-outputHash": "language/advanced-attributes.html#adv-attr-outputHash", + "#adv-attr-outputHashAlgo": "language/advanced-attributes.html#adv-attr-outputHashAlgo", + "#adv-attr-outputHashMode": "language/advanced-attributes.html#adv-attr-outputHashMode", + "#adv-attr-passAsFile": 
"language/advanced-attributes.html#adv-attr-passAsFile", + "#adv-attr-preferLocalBuild": "language/advanced-attributes.html#adv-attr-preferLocalBuild", + "#fixed-output-drvs": "language/advanced-attributes.html#adv-attr-outputHash", + "#sec-advanced-attributes": "language/advanced-attributes.html", + "#builtin-abort": "language/builtins.html#builtins-abort", + "#builtin-add": "language/builtins.html#builtins-add", + "#builtin-all": "language/builtins.html#builtins-all", + "#builtin-any": "language/builtins.html#builtins-any", + "#builtin-attrNames": "language/builtins.html#builtins-attrNames", + "#builtin-attrValues": "language/builtins.html#builtins-attrValues", + "#builtin-baseNameOf": "language/builtins.html#builtins-baseNameOf", + "#builtin-bitAnd": "language/builtins.html#builtins-bitAnd", + "#builtin-bitOr": "language/builtins.html#builtins-bitOr", + "#builtin-bitXor": "language/builtins.html#builtins-bitXor", + "#builtin-builtins": "language/builtins.html#builtins-builtins", + "#builtin-compareVersions": "language/builtins.html#builtins-compareVersions", + "#builtin-concatLists": "language/builtins.html#builtins-concatLists", + "#builtin-concatStringsSep": "language/builtins.html#builtins-concatStringsSep", + "#builtin-currentSystem": "language/builtins.html#builtins-currentSystem", + "#builtin-deepSeq": "language/builtins.html#builtins-deepSeq", + "#builtin-derivation": "language/builtins.html#builtins-derivation", + "#builtin-dirOf": "language/builtins.html#builtins-dirOf", + "#builtin-div": "language/builtins.html#builtins-div", + "#builtin-elem": "language/builtins.html#builtins-elem", + "#builtin-elemAt": "language/builtins.html#builtins-elemAt", + "#builtin-fetchGit": "language/builtins.html#builtins-fetchGit", + "#builtin-fetchTarball": "language/builtins.html#builtins-fetchTarball", + "#builtin-fetchurl": "language/builtins.html#builtins-fetchurl", + "#builtin-filterSource": "language/builtins.html#builtins-filterSource", + "#builtin-foldl-prime": "language/builtins.html#builtins-foldl-prime", + "#builtin-fromJSON": "language/builtins.html#builtins-fromJSON", + "#builtin-functionArgs": "language/builtins.html#builtins-functionArgs", + "#builtin-genList": "language/builtins.html#builtins-genList", + "#builtin-getAttr": "language/builtins.html#builtins-getAttr", + "#builtin-getEnv": "language/builtins.html#builtins-getEnv", + "#builtin-hasAttr": "language/builtins.html#builtins-hasAttr", + "#builtin-hashFile": "language/builtins.html#builtins-hashFile", + "#builtin-hashString": "language/builtins.html#builtins-hashString", + "#builtin-head": "language/builtins.html#builtins-head", + "#builtin-import": "language/builtins.html#builtins-import", + "#builtin-intersectAttrs": "language/builtins.html#builtins-intersectAttrs", + "#builtin-isAttrs": "language/builtins.html#builtins-isAttrs", + "#builtin-isBool": "language/builtins.html#builtins-isBool", + "#builtin-isFloat": "language/builtins.html#builtins-isFloat", + "#builtin-isFunction": "language/builtins.html#builtins-isFunction", + "#builtin-isInt": "language/builtins.html#builtins-isInt", + "#builtin-isList": "language/builtins.html#builtins-isList", + "#builtin-isNull": "language/builtins.html#builtins-isNull", + "#builtin-isString": "language/builtins.html#builtins-isString", + "#builtin-length": "language/builtins.html#builtins-length", + "#builtin-lessThan": "language/builtins.html#builtins-lessThan", + "#builtin-listToAttrs": "language/builtins.html#builtins-listToAttrs", + "#builtin-map": 
"language/builtins.html#builtins-map", + "#builtin-match": "language/builtins.html#builtins-match", + "#builtin-mul": "language/builtins.html#builtins-mul", + "#builtin-parseDrvName": "language/builtins.html#builtins-parseDrvName", + "#builtin-path": "language/builtins.html#builtins-path", + "#builtin-pathExists": "language/builtins.html#builtins-pathExists", + "#builtin-placeholder": "language/builtins.html#builtins-placeholder", + "#builtin-readDir": "language/builtins.html#builtins-readDir", + "#builtin-readFile": "language/builtins.html#builtins-readFile", + "#builtin-removeAttrs": "language/builtins.html#builtins-removeAttrs", + "#builtin-replaceStrings": "language/builtins.html#builtins-replaceStrings", + "#builtin-seq": "language/builtins.html#builtins-seq", + "#builtin-sort": "language/builtins.html#builtins-sort", + "#builtin-split": "language/builtins.html#builtins-split", + "#builtin-splitVersion": "language/builtins.html#builtins-splitVersion", + "#builtin-stringLength": "language/builtins.html#builtins-stringLength", + "#builtin-sub": "language/builtins.html#builtins-sub", + "#builtin-substring": "language/builtins.html#builtins-substring", + "#builtin-tail": "language/builtins.html#builtins-tail", + "#builtin-throw": "language/builtins.html#builtins-throw", + "#builtin-toFile": "language/builtins.html#builtins-toFile", + "#builtin-toJSON": "language/builtins.html#builtins-toJSON", + "#builtin-toPath": "language/builtins.html#builtins-toPath", + "#builtin-toString": "language/builtins.html#builtins-toString", + "#builtin-toXML": "language/builtins.html#builtins-toXML", + "#builtin-trace": "language/builtins.html#builtins-trace", + "#builtin-tryEval": "language/builtins.html#builtins-tryEval", + "#builtin-typeOf": "language/builtins.html#builtins-typeOf", + "#ssec-builtins": "language/builtins.html", + "#attr-system": "language/derivations.html#attr-system", + "#ssec-derivation": "language/derivations.html", + "#ch-expression-language": "language/index.html", + "#sec-constructs": "language/constructs.html", + "#sect-let-language": "language/constructs.html#let-language", + "#ss-functions": "language/constructs.html#functions", + "#sec-language-operators": "language/operators.html", + "#table-operators": "language/operators.html", + "#ssec-values": "language/values.html", + "#gloss-closure": "glossary.html#gloss-closure", + "#gloss-derivation": "glossary.html#gloss-derivation", + "#gloss-deriver": "glossary.html#gloss-deriver", + "#gloss-nar": "glossary.html#gloss-nar", + "#gloss-output-path": "glossary.html#gloss-output-path", + "#gloss-profile": "glossary.html#gloss-profile", + "#gloss-reachable": "glossary.html#gloss-reachable", + "#gloss-reference": "glossary.html#gloss-reference", + "#gloss-substitute": "glossary.html#gloss-substitute", + "#gloss-user-env": "glossary.html#gloss-user-env", + "#gloss-validity": "glossary.html#gloss-validity", + "#part-glossary": "glossary.html", + "#sec-building-source": "installation/building-source.html", + "#ch-env-variables": "installation/env-variables.html", + "#sec-installer-proxy-settings": "installation/env-variables.html#proxy-environment-variables", + "#sec-nix-ssl-cert-file": "installation/env-variables.html#nix_ssl_cert_file", + "#sec-nix-ssl-cert-file-with-nix-daemon-and-macos": "installation/env-variables.html#nix_ssl_cert_file-with-macos-and-the-nix-daemon", + "#chap-installation": "installation/installation.html", + "#ch-installing-binary": "installation/installing-binary.html", + "#sect-macos-installation": 
"installation/installing-binary.html#macos-installation", + "#sect-macos-installation-change-store-prefix": "installation/installing-binary.html#macos-installation", + "#sect-macos-installation-encrypted-volume": "installation/installing-binary.html#macos-installation", + "#sect-macos-installation-recommended-notes": "installation/installing-binary.html#macos-installation", + "#sect-macos-installation-symlink": "installation/installing-binary.html#macos-installation", + "#sect-multi-user-installation": "installation/installing-binary.html#multi-user-installation", + "#sect-nix-install-binary-tarball": "installation/installing-binary.html#installing-from-a-binary-tarball", + "#sect-nix-install-pinned-version-url": "installation/installing-binary.html#installing-a-pinned-nix-version-from-a-url", + "#sect-single-user-installation": "installation/installing-binary.html#single-user-installation", + "#ch-installing-source": "installation/installing-source.html", + "#ssec-multi-user": "installation/multi-user.html", + "#ch-nix-security": "installation/nix-security.html", + "#sec-obtaining-source": "installation/obtaining-source.html", + "#sec-prerequisites-source": "installation/prerequisites-source.html", + "#sec-single-user": "installation/single-user.html", + "#ch-supported-platforms": "installation/supported-platforms.html", + "#ch-upgrading-nix": "installation/upgrading.html", + "#ch-about-nix": "introduction.html", + "#chap-introduction": "introduction.html", + "#ch-basic-package-mgmt": "package-management/basic-package-mgmt.html", + "#ssec-binary-cache-substituter": "package-management/binary-cache-substituter.html", + "#sec-channels": "package-management/channels.html", + "#ssec-copy-closure": "package-management/copy-closure.html", + "#sec-garbage-collection": "package-management/garbage-collection.html", + "#ssec-gc-roots": "package-management/garbage-collector-roots.html", + "#chap-package-management": "package-management/package-management.html", + "#sec-profiles": "package-management/profiles.html", + "#ssec-s3-substituter": "package-management/s3-substituter.html", + "#ssec-s3-substituter-anonymous-reads": "package-management/s3-substituter.html#anonymous-reads-to-your-s3-compatible-binary-cache", + "#ssec-s3-substituter-authenticated-reads": "package-management/s3-substituter.html#authenticated-reads-to-your-s3-binary-cache", + "#ssec-s3-substituter-authenticated-writes": "package-management/s3-substituter.html#authenticated-writes-to-your-s3-compatible-binary-cache", + "#sec-sharing-packages": "package-management/sharing-packages.html", + "#ssec-ssh-substituter": "package-management/ssh-substituter.html", + "#chap-quick-start": "quick-start.html", + "#sec-relnotes": "release-notes/release-notes.html", + "#ch-relnotes-0.10.1": "release-notes/rl-0.10.1.html", + "#ch-relnotes-0.10": "release-notes/rl-0.10.html", + "#ssec-relnotes-0.11": "release-notes/rl-0.11.html", + "#ssec-relnotes-0.12": "release-notes/rl-0.12.html", + "#ssec-relnotes-0.13": "release-notes/rl-0.13.html", + "#ssec-relnotes-0.14": "release-notes/rl-0.14.html", + "#ssec-relnotes-0.15": "release-notes/rl-0.15.html", + "#ssec-relnotes-0.16": "release-notes/rl-0.16.html", + "#ch-relnotes-0.5": "release-notes/rl-0.5.html", + "#ch-relnotes-0.6": "release-notes/rl-0.6.html", + "#ch-relnotes-0.7": "release-notes/rl-0.7.html", + "#ch-relnotes-0.8.1": "release-notes/rl-0.8.1.html", + "#ch-relnotes-0.8": "release-notes/rl-0.8.html", + "#ch-relnotes-0.9.1": "release-notes/rl-0.9.1.html", + "#ch-relnotes-0.9.2": 
"release-notes/rl-0.9.2.html", + "#ch-relnotes-0.9": "release-notes/rl-0.9.html", + "#ssec-relnotes-1.0": "release-notes/rl-1.0.html", + "#ssec-relnotes-1.1": "release-notes/rl-1.1.html", + "#ssec-relnotes-1.10": "release-notes/rl-1.10.html", + "#ssec-relnotes-1.11.10": "release-notes/rl-1.11.10.html", + "#ssec-relnotes-1.11": "release-notes/rl-1.11.html", + "#ssec-relnotes-1.2": "release-notes/rl-1.2.html", + "#ssec-relnotes-1.3": "release-notes/rl-1.3.html", + "#ssec-relnotes-1.4": "release-notes/rl-1.4.html", + "#ssec-relnotes-1.5.1": "release-notes/rl-1.5.1.html", + "#ssec-relnotes-1.5.2": "release-notes/rl-1.5.2.html", + "#ssec-relnotes-1.5": "release-notes/rl-1.5.html", + "#ssec-relnotes-1.6.1": "release-notes/rl-1.6.1.html", + "#ssec-relnotes-1.6.0": "release-notes/rl-1.6.html", + "#ssec-relnotes-1.7": "release-notes/rl-1.7.html", + "#ssec-relnotes-1.8": "release-notes/rl-1.8.html", + "#ssec-relnotes-1.9": "release-notes/rl-1.9.html", + "#ssec-relnotes-2.0": "release-notes/rl-2.0.html", + "#ssec-relnotes-2.1": "release-notes/rl-2.1.html", + "#ssec-relnotes-2.2": "release-notes/rl-2.2.html", + "#ssec-relnotes-2.3": "release-notes/rl-2.3.html" +}; + +var isRoot = (document.location.pathname.endsWith('/') || document.location.pathname.endsWith('/index.html')) && path_to_root === ''; +if (isRoot && redirects[document.location.hash]) { + document.location.href = path_to_root + redirects[document.location.hash]; +} diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index 4e2afa20e..908e7e3d9 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -26,21 +26,14 @@ - [Copying Closures via SSH](package-management/copy-closure.md) - [Serving a Nix store via SSH](package-management/ssh-substituter.md) - [Serving a Nix store via S3](package-management/s3-substituter.md) -- [Writing Nix Expressions](expressions/writing-nix-expressions.md) - - [A Simple Nix Expression](expressions/simple-expression.md) - - [Expression Syntax](expressions/expression-syntax.md) - - [Build Script](expressions/build-script.md) - - [Arguments and Variables](expressions/arguments-variables.md) - - [Building and Testing](expressions/simple-building-testing.md) - - [Generic Builder Syntax](expressions/generic-builder.md) - - [Writing Nix Expressions](expressions/expression-language.md) - - [Values](expressions/language-values.md) - - [Language Constructs](expressions/language-constructs.md) - - [Operators](expressions/language-operators.md) - - [Derivations](expressions/derivations.md) - - [Advanced Attributes](expressions/advanced-attributes.md) - - [Built-in Constants](expressions/builtin-constants.md) - - [Built-in Functions](expressions/builtins.md) +- [Nix Language](language/index.md) + - [Data Types](language/values.md) + - [Language Constructs](language/constructs.md) + - [Operators](language/operators.md) + - [Derivations](language/derivations.md) + - [Advanced Attributes](language/advanced-attributes.md) + - [Built-in Constants](language/builtin-constants.md) + - [Built-in Functions](language/builtins.md) - [Advanced Topics](advanced-topics/advanced-topics.md) - [Remote Builds](advanced-topics/distributed-builds.md) - [Tuning Cores and Jobs](advanced-topics/cores-vs-jobs.md) @@ -72,6 +65,11 @@ - [CLI guideline](contributing/cli-guideline.md) - [Release Notes](release-notes/release-notes.md) - [Release X.Y (202?-??-??)](release-notes/rl-next.md) + - [Release 2.11 (2022-08-25)](release-notes/rl-2.11.md) + - [Release 2.10 (2022-07-11)](release-notes/rl-2.10.md) + - 
[Release 2.9 (2022-05-30)](release-notes/rl-2.9.md) + - [Release 2.8 (2022-04-19)](release-notes/rl-2.8.md) + - [Release 2.7 (2022-03-07)](release-notes/rl-2.7.md) - [Release 2.6 (2022-01-24)](release-notes/rl-2.6.md) - [Release 2.5 (2021-12-13)](release-notes/rl-2.5.md) - [Release 2.4 (2021-11-01)](release-notes/rl-2.4.md) diff --git a/doc/manual/src/advanced-topics/diff-hook.md b/doc/manual/src/advanced-topics/diff-hook.md index 7a2622b3d..161e64b2a 100644 --- a/doc/manual/src/advanced-topics/diff-hook.md +++ b/doc/manual/src/advanced-topics/diff-hook.md @@ -101,7 +101,7 @@ In particular, notice the `/nix/store/krpqk0l9ib0ibi1d2w52z293zw455cap-unstable.check` output. Nix has copied the build results to that directory where you can examine it. -> **Note** +> []{#check-dirs-are-unregistered} **Note** > > Check paths are not protected against garbage collection, and this > path will be deleted on the next garbage collection. diff --git a/doc/manual/src/advanced-topics/distributed-builds.md b/doc/manual/src/advanced-topics/distributed-builds.md index c4c60db15..fefd10100 100644 --- a/doc/manual/src/advanced-topics/distributed-builds.md +++ b/doc/manual/src/advanced-topics/distributed-builds.md @@ -12,14 +12,14 @@ machine is accessible via SSH and that it has Nix installed. You can test whether connecting to the remote Nix instance works, e.g. ```console -$ nix ping-store --store ssh://mac +$ nix store ping --store ssh://mac ``` will try to connect to the machine named `mac`. It is possible to specify an SSH identity file as part of the remote store URI, e.g. ```console -$ nix ping-store --store ssh://mac?ssh-key=/home/alice/my-key +$ nix store ping --store ssh://mac?ssh-key=/home/alice/my-key ``` Since builds should be non-interactive, the key should not have a @@ -110,7 +110,7 @@ default, set it to `-`. 7. A comma-separated list of *mandatory features*. A machine will only be used to build a derivation if all of the machine’s mandatory features appear in the derivation’s `requiredSystemFeatures` - attribute.. + attribute. 8. The (base64-encoded) public host key of the remote machine. If omitted, SSH will use its regular known-hosts file. Specifically, the field is calculated diff --git a/doc/manual/src/command-ref/env-common.md b/doc/manual/src/command-ref/env-common.md index 6e2403461..3f3eb6915 100644 --- a/doc/manual/src/command-ref/env-common.md +++ b/doc/manual/src/command-ref/env-common.md @@ -2,11 +2,11 @@ Most Nix commands interpret the following environment variables: - - `IN_NIX_SHELL`\ + - [`IN_NIX_SHELL`]{#env-IN_NIX_SHELL}\ Indicator that tells if the current environment was set up by - `nix-shell`. Since Nix 2.0 the values are `"pure"` and `"impure"` + `nix-shell`. It can have the values `pure` or `impure`. - - `NIX_PATH`\ + - [`NIX_PATH`]{#env-NIX_PATH}\ A colon-separated list of directories used to look up Nix expressions enclosed in angle brackets (i.e., `<path>`). For instance, the value @@ -44,7 +44,7 @@ Most Nix commands interpret the following environment variables: The Nix search path can also be extended using the `-I` option to many Nix commands, which takes precedence over `NIX_PATH`. - - `NIX_IGNORE_SYMLINK_STORE`\ + - [`NIX_IGNORE_SYMLINK_STORE`]{#env-NIX_IGNORE_SYMLINK_STORE}\ Normally, the Nix store directory (typically `/nix/store`) is not allowed to contain any symlink components. This is to prevent “impure” builds. 
Builders sometimes “canonicalise” paths by @@ -66,41 +66,41 @@ Most Nix commands interpret the following environment variables: Consult the mount 8 manual page for details. - - `NIX_STORE_DIR`\ + - [`NIX_STORE_DIR`]{#env-NIX_STORE_DIR}\ Overrides the location of the Nix store (default `prefix/store`). - - `NIX_DATA_DIR`\ + - [`NIX_DATA_DIR`]{#env-NIX_DATA_DIR}\ Overrides the location of the Nix static data directory (default `prefix/share`). - - `NIX_LOG_DIR`\ + - [`NIX_LOG_DIR`]{#env-NIX_LOG_DIR}\ Overrides the location of the Nix log directory (default `prefix/var/log/nix`). - - `NIX_STATE_DIR`\ + - [`NIX_STATE_DIR`]{#env-NIX_STATE_DIR}\ Overrides the location of the Nix state directory (default `prefix/var/nix`). - - `NIX_CONF_DIR`\ + - [`NIX_CONF_DIR`]{#env-NIX_CONF_DIR}\ Overrides the location of the system Nix configuration directory (default `prefix/etc/nix`). - - `NIX_CONFIG`\ + - [`NIX_CONFIG`]{#env-NIX_CONFIG}\ Applies settings from Nix configuration from the environment. The content is treated as if it was read from a Nix configuration file. Settings are separated by the newline character. - - `NIX_USER_CONF_FILES`\ + - [`NIX_USER_CONF_FILES`]{#env-NIX_USER_CONF_FILES}\ Overrides the location of the user Nix configuration files to load from (defaults to the XDG spec locations). The variable is treated as a list separated by the `:` token. - - `TMPDIR`\ + - [`TMPDIR`]{#env-TMPDIR}\ Use the specified directory to store temporary files. In particular, this includes temporary build directories; these can take up substantial amounts of disk space. The default is `/tmp`. - - `NIX_REMOTE`\ + - [`NIX_REMOTE`]{#env-NIX_REMOTE}\ This variable should be set to `daemon` if you want to use the Nix daemon to execute Nix operations. This is necessary in [multi-user Nix installations](../installation/multi-user.md). If the Nix @@ -108,16 +108,16 @@ Most Nix commands interpret the following environment variables: should be set to `unix://path/to/socket`. Otherwise, it should be left unset. - - `NIX_SHOW_STATS`\ + - [`NIX_SHOW_STATS`]{#env-NIX_SHOW_STATS}\ If set to `1`, Nix will print some evaluation statistics, such as the number of values allocated. - - `NIX_COUNT_CALLS`\ + - [`NIX_COUNT_CALLS`]{#env-NIX_COUNT_CALLS}\ If set to `1`, Nix will print how often functions were called during Nix expression evaluation. This is useful for profiling your Nix expressions. - - `GC_INITIAL_HEAP_SIZE`\ + - [`GC_INITIAL_HEAP_SIZE`]{#env-GC_INITIAL_HEAP_SIZE}\ If Nix has been configured to use the Boehm garbage collector, this variable sets the initial size of the heap in bytes. It defaults to 384 MiB. Setting it to a low value reduces memory consumption, but diff --git a/doc/manual/src/command-ref/nix-build.md b/doc/manual/src/command-ref/nix-build.md index 43de7a6e6..49c6f3f55 100644 --- a/doc/manual/src/command-ref/nix-build.md +++ b/doc/manual/src/command-ref/nix-build.md @@ -12,6 +12,12 @@ [`--dry-run`] [{`--out-link` | `-o`} *outlink*] +# Disambiguation + +This man page describes the command `nix-build`, which is distinct from `nix +build`. For documentation on the latter, run `nix build --help` or see `man +nix3-build`. + # Description The `nix-build` command builds the derivations described by the Nix @@ -47,16 +53,16 @@ All options not listed here are passed to `nix-store --realise`, except for `--arg` and `--attr` / `-A` which are passed to `nix-instantiate`. - - `--no-out-link`\ + - [`--no-out-link`]{#opt-no-out-link}\ Do not create a symlink to the output path. 
Note that as a result the output does not become a root of the garbage collector, and so might be deleted by `nix-store --gc`. - - `--dry-run`\ + - [`--dry-run`]{#opt-dry-run}\ Show what store paths would be built or downloaded. - - `--out-link` / `-o` *outlink*\ + - [`--out-link`]{#opt-out-link} / `-o` *outlink*\ Change the name of the symlink to the output path created from `result` to *outlink*. diff --git a/doc/manual/src/command-ref/nix-copy-closure.md b/doc/manual/src/command-ref/nix-copy-closure.md index 7047d3012..9a29030bd 100644 --- a/doc/manual/src/command-ref/nix-copy-closure.md +++ b/doc/manual/src/command-ref/nix-copy-closure.md @@ -30,8 +30,8 @@ Since `nix-copy-closure` calls `ssh`, you may be asked to type in the appropriate password or passphrase. In fact, you may be asked _twice_ because `nix-copy-closure` currently connects twice to the remote machine, first to get the set of paths missing on the target machine, -and second to send the dump of those paths. If this bothers you, use -`ssh-agent`. +and second to send the dump of those paths. When using public key +authentication, you can avoid typing the passphrase with `ssh-agent`. # Options diff --git a/doc/manual/src/command-ref/nix-env.md b/doc/manual/src/command-ref/nix-env.md index 8d6abaf52..a5df35d77 100644 --- a/doc/manual/src/command-ref/nix-env.md +++ b/doc/manual/src/command-ref/nix-env.md @@ -31,7 +31,7 @@ subcommand to be performed. These are documented below. Several commands, such as `nix-env -q` and `nix-env -i`, take a list of arguments that specify the packages on which to operate. These are extended regular expressions that must match the entire name of the -package. (For details on regular expressions, see regex7.) The match is +package. (For details on regular expressions, see **regex**(7).) The match is case-sensitive. The regular expression can optionally be followed by a dash and a version number; if omitted, any version of the package will match. Here are some examples: @@ -198,7 +198,7 @@ a number of possible ways: another. - If `--from-expression` is given, *args* are Nix - [functions](../expressions/language-constructs.md#functions) + [functions](../language/constructs.md#functions) that are called with the active Nix expression as their single argument. The derivations returned by those function calls are installed. This allows derivations to be specified in an @@ -412,7 +412,7 @@ The upgrade operation determines whether a derivation `y` is an upgrade of a derivation `x` by looking at their respective `name` attributes. The names (e.g., `gcc-3.3.1` are split into two parts: the package name (`gcc`), and the version (`3.3.1`). The version part starts after the -first dash not followed by a letter. `x` is considered an upgrade of `y` +first dash not followed by a letter. `y` is considered an upgrade of `x` if their package names match, and the version of `y` is higher than that of `x`. diff --git a/doc/manual/src/command-ref/nix-instantiate.md b/doc/manual/src/command-ref/nix-instantiate.md index 2e198daed..8f143729e 100644 --- a/doc/manual/src/command-ref/nix-instantiate.md +++ b/doc/manual/src/command-ref/nix-instantiate.md @@ -51,7 +51,7 @@ standard input. - `--strict`\ When used with `--eval`, recursively evaluate list elements and attributes. Normally, such sub-expressions are left unevaluated - (since the Nix expression language is lazy). + (since the Nix language is lazy). > **Warning** > @@ -66,7 +66,7 @@ standard input. 
When used with `--eval`, print the resulting value as an XML representation of the abstract syntax tree rather than as an ATerm. The schema is the same as that used by the [`toXML` - built-in](../expressions/builtins.md). + built-in](../language/builtins.md). - `--read-write-mode`\ When used with `--eval`, perform evaluation in read/write mode so diff --git a/doc/manual/src/command-ref/nix-shell.md b/doc/manual/src/command-ref/nix-shell.md index a2b6d8a8e..840bccd25 100644 --- a/doc/manual/src/command-ref/nix-shell.md +++ b/doc/manual/src/command-ref/nix-shell.md @@ -15,6 +15,12 @@ [`--keep` *name*] {{`--packages` | `-p`} {*packages* | *expressions*} … | [*path*]} +# Disambiguation + +This man page describes the command `nix-shell`, which is distinct from `nix +shell`. For documentation on the latter, run `nix shell --help` or see `man +nix3-shell`. + # Description The command `nix-shell` will build the dependencies of the specified diff --git a/doc/manual/src/command-ref/nix-store.md b/doc/manual/src/command-ref/nix-store.md index 7db9f0c1c..ecd838e8d 100644 --- a/doc/manual/src/command-ref/nix-store.md +++ b/doc/manual/src/command-ref/nix-store.md @@ -22,7 +22,7 @@ This section lists the options that are common to all operations. These options are allowed for every subcommand, though they may not always have an effect. - - `--add-root` *path*\ + - [`--add-root`]{#opt-add-root} *path*\ Causes the result of a realisation (`--realise` and `--force-realise`) to be registered as a root of the garbage collector. *path* will be created as a symlink to the resulting @@ -121,7 +121,7 @@ Special exit codes: - `102`\ Hash mismatch, the build output was rejected because it does not match the [`outputHash` attribute of the - derivation](../expressions/advanced-attributes.md). + derivation](../language/advanced-attributes.md). - `104`\ Not deterministic, the build succeeded in check mode but the diff --git a/doc/manual/src/command-ref/opt-common.md b/doc/manual/src/command-ref/opt-common.md index 7ee1a26bc..e612c416f 100644 --- a/doc/manual/src/command-ref/opt-common.md +++ b/doc/manual/src/command-ref/opt-common.md @@ -2,13 +2,13 @@ Most Nix commands accept the following command-line options: - - `--help`\ + - [`--help`]{#opt-help}\ Prints out a summary of the command syntax and exits. - - `--version`\ + - [`--version`]{#opt-version}\ Prints out the Nix version number on standard output and exits. - - `--verbose` / `-v`\ + - [`--verbose`]{#opt-verbose} / `-v`\ Increases the level of verbosity of diagnostic messages printed on standard error. For each Nix operation, the information printed on standard output is well-defined; any diagnostic information is @@ -37,14 +37,14 @@ Most Nix commands accept the following command-line options: - 5\ “Vomit”: print vast amounts of debug information. - - `--quiet`\ + - [`--quiet`]{#opt-quiet}\ Decreases the level of verbosity of diagnostic messages printed on standard error. This is the inverse option to `-v` / `--verbose`. This option may be specified repeatedly. See the previous verbosity levels list. - - `--log-format` *format*\ + - [`--log-format`]{#opt-log-format} *format*\ This option can be used to change the output of the log format, with *format* being one of: @@ -66,14 +66,14 @@ Most Nix commands accept the following command-line options: - bar-with-logs\ Display the raw logs, with the progress bar at the bottom. 
- - `--no-build-output` / `-Q`\ + - [`--no-build-output`]{#opt-no-build-output} / `-Q`\ By default, output written by builders to standard output and standard error is echoed to the Nix command's standard error. This option suppresses this behaviour. Note that the builder's standard output and error are always written to a log file in `prefix/nix/var/log/nix`. - - `--max-jobs` / `-j` *number*\ + - [`--max-jobs`]{#opt-max-jobs} / `-j` *number*\ Sets the maximum number of build jobs that Nix will perform in parallel to the specified number. Specify `auto` to use the number of CPUs in the system. The default is specified by the `max-jobs` @@ -83,7 +83,7 @@ Most Nix commands accept the following command-line options: Setting it to `0` disallows building on the local machine, which is useful when you want builds to happen only on remote builders. - - `--cores`\ + - [`--cores`]{#opt-cores}\ Sets the value of the `NIX_BUILD_CORES` environment variable in the invocation of builders. Builders can use this variable at their discretion to control the maximum amount of parallelism. For @@ -94,18 +94,18 @@ Most Nix commands accept the following command-line options: means that the builder should use all available CPU cores in the system. - - `--max-silent-time`\ + - [`--max-silent-time`]{#opt-max-silent-time}\ Sets the maximum number of seconds that a builder can go without producing any data on standard output or standard error. The default is specified by the `max-silent-time` configuration setting. `0` means no time-out. - - `--timeout`\ + - [`--timeout`]{#opt-timeout}\ Sets the maximum number of seconds that a builder can run. The default is specified by the `timeout` configuration setting. `0` means no timeout. - - `--keep-going` / `-k`\ + - [`--keep-going`]{#opt-keep-going} / `-k`\ Keep going in case of failed builds, to the greatest extent possible. That is, if building an input of some derivation fails, Nix will still build the other inputs, but not the derivation @@ -113,13 +113,13 @@ Most Nix commands accept the following command-line options: for builds of substitutes), possibly killing builds in progress (in case of parallel or distributed builds). - - `--keep-failed` / `-K`\ + - [`--keep-failed`]{#opt-keep-failed} / `-K`\ Specifies that in case of a build failure, the temporary directory (usually in `/tmp`) in which the build takes place should not be deleted. The path of the build directory is printed as an informational message. - - `--fallback`\ + - [`--fallback`]{#opt-fallback}\ Whenever Nix attempts to build a derivation for which substitutes are known for each output path, but realising the output paths through the substitutes fails, fall back on building the derivation. @@ -134,18 +134,18 @@ Most Nix commands accept the following command-line options: failure in obtaining the substitutes to lead to a full build from source (with the related consumption of resources). - - `--readonly-mode`\ + - [`--readonly-mode`]{#opt-readonly-mode}\ When this option is used, no attempt is made to open the Nix database. Most Nix operations do need database access, so those operations will fail. - - `--arg` *name* *value*\ + - [`--arg`]{#opt-arg} *name* *value*\ This option is accepted by `nix-env`, `nix-instantiate`, `nix-shell` and `nix-build`. When evaluating Nix expressions, the expression evaluator will automatically try to call functions that it encounters. 
It can automatically call functions for which every argument has a [default - value](../expressions/language-constructs.md#functions) (e.g., + value](../language/constructs.md#functions) (e.g., `{ argName ? defaultValue }: ...`). With `--arg`, you can also call functions that have arguments without a default value (or override a default value). That is, if the evaluator encounters a @@ -164,19 +164,19 @@ Most Nix commands accept the following command-line options: So if you call this Nix expression (e.g., when you do `nix-env -iA pkgname`), the function will be called automatically using the - value [`builtins.currentSystem`](../expressions/builtins.md) for + value [`builtins.currentSystem`](../language/builtins.md) for the `system` argument. You can override this using `--arg`, e.g., `nix-env -iA pkgname --arg system \"i686-freebsd\"`. (Note that since the argument is a Nix string literal, you have to escape the quotes.) - - `--argstr` *name* *value*\ + - [`--argstr`]{#opt-argstr} *name* *value*\ This option is like `--arg`, only the value is not a Nix expression but a string. So instead of `--arg system \"i686-linux\"` (the outer quotes are to keep the shell happy) you can say `--argstr system i686-linux`. - - `--attr` / `-A` *attrPath*\ + - [`--attr`]{#opt-attr} / `-A` *attrPath*\ Select an attribute from the top-level Nix expression being evaluated. (`nix-env`, `nix-instantiate`, `nix-build` and `nix-shell` only.) The *attribute path* *attrPath* is a sequence @@ -191,7 +191,7 @@ Most Nix commands accept the following command-line options: attribute of the fourth element of the array in the `foo` attribute of the top-level expression. - - `--expr` / `-E`\ + - [`--expr`]{#opt-expr} / `-E`\ Interpret the command line arguments as a list of Nix expressions to be parsed and evaluated, rather than as a list of file names of Nix expressions. (`nix-instantiate`, `nix-build` and `nix-shell` only.) @@ -202,17 +202,17 @@ Most Nix commands accept the following command-line options: use, give your expression to the `nix-shell -p` convenience flag instead. - - `-I` *path*\ + - [`-I`]{#opt-I} *path*\ Add a path to the Nix expression search path. This option may be given multiple times. See the `NIX_PATH` environment variable for information on the semantics of the Nix search path. Paths added through `-I` take precedence over `NIX_PATH`. - - `--option` *name* *value*\ + - [`--option`]{#opt-option} *name* *value*\ Set the Nix configuration option *name* to *value*. This overrides settings in the Nix configuration file (see nix.conf5). - - `--repair`\ + - [`--repair`]{#opt-repair}\ Fix corrupted or missing store paths by redownloading or rebuilding them. Note that this is slow because it requires computing a cryptographic hash of the contents of every path in the closure of diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index 90a8f1f94..9f7d5057b 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -42,7 +42,7 @@ $ nix develop ``` To get a shell with a different compilation environment (e.g. stdenv, -gccStdenv, clangStdenv, clang11Stdenv): +gccStdenv, clangStdenv, clang11Stdenv, ccacheStdenv): ```console $ nix-shell -A devShells.x86_64-linux.clang11StdenvPackages @@ -54,6 +54,9 @@ or if you have a flake-enabled nix: $ nix develop .#clang11StdenvPackages ``` +Note: you can use `ccacheStdenv` to drastically improve rebuild +time. By default, ccache keeps artifacts in `~/.cache/ccache/`. 
+ To build Nix itself in this shell: ```console @@ -71,18 +74,6 @@ To install it in `$(pwd)/outputs` and test it: nix (Nix) 3.0 ``` -To run a functional test: - -```console -make tests/test-name-should-auto-complete.sh.test -``` - -To run the unit-tests for C++ code: - -``` -make check -``` - If you have a flakes-enabled Nix you can replace: ```console @@ -94,3 +85,96 @@ by: ```console $ nix develop ``` + +## Running tests + +### Unit-tests + +The unit-tests for each Nix library (`libexpr`, `libstore`, etc..) are defined +under `src/{library_name}/tests` using the +[googletest](https://google.github.io/googletest/) framework. + +You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`. Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option. + +### Functional tests + +The functional tests reside under the `tests` directory and are listed in `tests/local.mk`. +The whole testsuite can be run with `make install && make installcheck`. +Individual tests can be run with `make tests/{testName}.sh.test`. + +### Integration tests + +The integration tests are defined in the Nix flake under the `hydraJobs.tests` attribute. +These tests include everything that needs to interact with external services or run Nix in a non-trivial distributed setup. +Because these tests are expensive and require more than what the standard github-actions setup provides, they only run on the master branch (on <https://hydra.nixos.org/jobset/nix/master>). + +You can run them manually with `nix build .#hydraJobs.tests.{testName}` or `nix-build -A hydraJobs.tests.{testName}` + +### Installer tests + +After a one-time setup, the Nix repository's GitHub Actions continuous integration (CI) workflow can test the installer each time you push to a branch. + +Creating a Cachix cache for your installer tests and adding its authorization token to GitHub enables [two installer-specific jobs in the CI workflow](https://github.com/NixOS/nix/blob/88a45d6149c0e304f6eb2efcc2d7a4d0d569f8af/.github/workflows/ci.yml#L50-L91): + +- The `installer` job generates installers for the platforms below and uploads them to your Cachix cache: + - `x86_64-linux` + - `armv6l-linux` + - `armv7l-linux` + - `x86_64-darwin` + +- The `installer_test` job (which runs on `ubuntu-latest` and `macos-latest`) will try to install Nix with the cached installer and run a trivial Nix command. + +#### One-time setup + +1. Have a GitHub account with a fork of the [Nix repository](https://github.com/NixOS/nix). +2. At cachix.org: + - Create or log in to an account. + - Create a Cachix cache using the format `<github-username>-nix-install-tests`. + - Navigate to the new cache > Settings > Auth Tokens. + - Generate a new Cachix auth token and copy the generated value. +3. At github.com: + - Navigate to your Nix fork > Settings > Secrets > Actions > New repository secret. + - Name the secret `CACHIX_AUTH_TOKEN`. + - Paste the copied value of the Cachix cache auth token. + +#### Using the CI-generated installer for manual testing + +After the CI run completes, you can check the output to extract the installer URL: +1. Click into the detailed view of the CI run. +2. Click into any `installer_test` run (the URL you're here to extract will be the same in all of them). +3. 
Click into the `Run cachix/install-nix-action@v...` step and click the detail triangle next to the first log line (it will also be `Run cachix/install-nix-action@v...`) +4. Copy the value of `install_url` +5. To generate an install command, plug this `install_url` and your GitHub username into this template: + + ```console + sh <(curl -L <install_url>) --tarball-url-prefix https://<github-username>-nix-install-tests.cachix.org/serve + ``` + +<!-- #### Manually generating test installers + +There's obviously a manual way to do this, and it's still the only way for +platforms that lack GA runners. + +I did do this back in Fall 2020 (before the GA approach encouraged here). I'll +sketch what I recall in case it encourages someone to fill in detail, but: I +didn't know what I was doing at the time and had to fumble/ask around a lot-- +so I don't want to uphold any of it as "right". It may have been dumb or +the _hard_ way from the getgo. Fundamentals may have changed since. + +Here's the build command I used to do this on and for x86_64-darwin: +nix build --out-link /tmp/foo ".#checks.x86_64-darwin.binaryTarball" + +I used the stable out-link to make it easier to script the next steps: +link=$(readlink /tmp/foo) +cp $link/*-darwin.tar.xz ~/somewheres + +I've lost the last steps and am just going from memory: + +From here, I think I had to extract and modify the `install` script to point +it at this tarball (which I scped to my own site, but it might make more sense +to just share them locally). I extracted this script once and then just +search/replaced in it for each new build. + +The installer now supports a `--tarball-url-prefix` flag which _may_ have +solved this need? +--> diff --git a/doc/manual/src/expressions/arguments-variables.md b/doc/manual/src/expressions/arguments-variables.md deleted file mode 100644 index 12198c879..000000000 --- a/doc/manual/src/expressions/arguments-variables.md +++ /dev/null @@ -1,80 +0,0 @@ -# Arguments and Variables - -The [Nix expression for GNU Hello](expression-syntax.md) is a -function; it is missing some arguments that have to be filled in -somewhere. In the Nix Packages collection this is done in the file -`pkgs/top-level/all-packages.nix`, where all Nix expressions for -packages are imported and called with the appropriate arguments. Here -are some fragments of `all-packages.nix`, with annotations of what -they mean: - -```nix -... - -rec { ① - - hello = import ../applications/misc/hello/ex-1 ② { ③ - inherit fetchurl stdenv perl; - }; - - perl = import ../development/interpreters/perl { ④ - inherit fetchurl stdenv; - }; - - fetchurl = import ../build-support/fetchurl { - inherit stdenv; ... - }; - - stdenv = ...; - -} -``` - -1. This file defines a set of attributes, all of which are concrete - derivations (i.e., not functions). In fact, we define a *mutually - recursive* set of attributes. That is, the attributes can refer to - each other. This is precisely what we want since we want to “plug” - the various packages into each other. - -2. Here we *import* the Nix expression for GNU Hello. The import - operation just loads and returns the specified Nix expression. In - fact, we could just have put the contents of the Nix expression - for GNU Hello in `all-packages.nix` at this point. That would be - completely equivalent, but it would make `all-packages.nix` rather - bulky. - - Note that we refer to `../applications/misc/hello/ex-1`, not - `../applications/misc/hello/ex-1/default.nix`. 
When you try to - import a directory, Nix automatically appends `/default.nix` to the - file name. - -3. This is where the actual composition takes place. Here we *call* the - function imported from `../applications/misc/hello/ex-1` with a set - containing the things that the function expects, namely `fetchurl`, - `stdenv`, and `perl`. We use inherit again to use the attributes - defined in the surrounding scope (we could also have written - `fetchurl = fetchurl;`, etc.). - - The result of this function call is an actual derivation that can be - built by Nix (since when we fill in the arguments of the function, - what we get is its body, which is the call to `stdenv.mkDerivation` - in the [Nix expression for GNU Hello](expression-syntax.md)). - - > **Note** - > - > Nixpkgs has a convenience function `callPackage` that imports and - > calls a function, filling in any missing arguments by passing the - > corresponding attribute from the Nixpkgs set, like this: - > - > ```nix - > hello = callPackage ../applications/misc/hello/ex-1 { }; - > ``` - > - > If necessary, you can set or override arguments: - > - > ```nix - > hello = callPackage ../applications/misc/hello/ex-1 { stdenv = myStdenv; }; - > ``` - -4. Likewise, we have to instantiate Perl, `fetchurl`, and the standard - environment. diff --git a/doc/manual/src/expressions/build-script.md b/doc/manual/src/expressions/build-script.md deleted file mode 100644 index b1eacae88..000000000 --- a/doc/manual/src/expressions/build-script.md +++ /dev/null @@ -1,70 +0,0 @@ -# Build Script - -Here is the builder referenced from Hello's Nix expression (stored in -`pkgs/applications/misc/hello/ex-1/builder.sh`): - -```bash -source $stdenv/setup ① - -PATH=$perl/bin:$PATH ② - -tar xvfz $src ③ -cd hello-* -./configure --prefix=$out ④ -make ⑤ -make install -``` - -The builder can actually be made a lot shorter by using the *generic -builder* functions provided by `stdenv`, but here we write out the build -steps to elucidate what a builder does. It performs the following steps: - -1. When Nix runs a builder, it initially completely clears the - environment (except for the attributes declared in the derivation). - This is done to prevent undeclared inputs from being used in the - build process. If for example the `PATH` contained `/usr/bin`, then - you might accidentally use `/usr/bin/gcc`. - - So the first step is to set up the environment. This is done by - calling the `setup` script of the standard environment. The - environment variable `stdenv` points to the location of the - standard environment being used. (It wasn't specified explicitly - as an attribute in Hello's Nix expression, but `mkDerivation` adds - it automatically.) - -2. Since Hello needs Perl, we have to make sure that Perl is in the - `PATH`. The `perl` environment variable points to the location of - the Perl package (since it was passed in as an attribute to the - derivation), so `$perl/bin` is the directory containing the Perl - interpreter. - -3. Now we have to unpack the sources. The `src` attribute was bound to - the result of fetching the Hello source tarball from the network, so - the `src` environment variable points to the location in the Nix - store to which the tarball was downloaded. After unpacking, we `cd` - to the resulting source directory. - - The whole build is performed in a temporary directory created in - `/tmp`, by the way. This directory is removed after the builder - finishes, so there is no need to clean up the sources afterwards. 
- Also, the temporary directory is always newly created, so you don't - have to worry about files from previous builds interfering with the - current build. - -4. GNU Hello is a typical Autoconf-based package, so we first have to - run its `configure` script. In Nix every package is stored in a - separate location in the Nix store, for instance - `/nix/store/9a54ba97fb71b65fda531012d0443ce2-hello-2.1.1`. Nix - computes this path by cryptographically hashing all attributes of - the derivation. The path is passed to the builder through the `out` - environment variable. So here we give `configure` the parameter - `--prefix=$out` to cause Hello to be installed in the expected - location. - -5. Finally we build Hello (`make`) and install it into the location - specified by `out` (`make install`). - -If you are wondering about the absence of error checking on the result -of various commands called in the builder: this is because the shell -script is evaluated with Bash's `-e` option, which causes the script to -be aborted if any command fails without an error check. diff --git a/doc/manual/src/expressions/expression-language.md b/doc/manual/src/expressions/expression-language.md deleted file mode 100644 index 267fcb983..000000000 --- a/doc/manual/src/expressions/expression-language.md +++ /dev/null @@ -1,12 +0,0 @@ -# Nix Expression Language - -The Nix expression language is a pure, lazy, functional language. Purity -means that operations in the language don't have side-effects (for -instance, there is no variable assignment). Laziness means that -arguments to functions are evaluated only when they are needed. -Functional means that functions are “normal” values that can be passed -around and manipulated in interesting ways. The language is not a -full-featured, general purpose language. Its main job is to describe -packages, compositions of packages, and the variability within packages. - -This section presents the various features of the language. diff --git a/doc/manual/src/expressions/expression-syntax.md b/doc/manual/src/expressions/expression-syntax.md deleted file mode 100644 index 6b93e692c..000000000 --- a/doc/manual/src/expressions/expression-syntax.md +++ /dev/null @@ -1,93 +0,0 @@ -# Expression Syntax - -Here is a Nix expression for GNU Hello: - -```nix -{ stdenv, fetchurl, perl }: ① - -stdenv.mkDerivation { ② - name = "hello-2.1.1"; ③ - builder = ./builder.sh; ④ - src = fetchurl { ⑤ - url = "ftp://ftp.nluug.nl/pub/gnu/hello/hello-2.1.1.tar.gz"; - sha256 = "1md7jsfd8pa45z73bz1kszpp01yw6x5ljkjk2hx7wl800any6465"; - }; - inherit perl; ⑥ -} -``` - -This file is actually already in the Nix Packages collection in -`pkgs/applications/misc/hello/ex-1/default.nix`. It is customary to -place each package in a separate directory and call the single Nix -expression in that directory `default.nix`. The file has the following -elements (referenced from the figure by number): - -1. This states that the expression is a *function* that expects to be - called with three arguments: `stdenv`, `fetchurl`, and `perl`. They - are needed to build Hello, but we don't know how to build them here; - that's why they are function arguments. `stdenv` is a package that - is used by almost all Nix Packages; it provides a - “standard” environment consisting of the things you would expect - in a basic Unix environment: a C/C++ compiler (GCC, to be precise), - the Bash shell, fundamental Unix tools such as `cp`, `grep`, `tar`, - etc. `fetchurl` is a function that downloads files. `perl` is the - Perl interpreter. 
- - Nix functions generally have the form `{ x, y, ..., z }: e` where - `x`, `y`, etc. are the names of the expected arguments, and where - *e* is the body of the function. So here, the entire remainder of - the file is the body of the function; when given the required - arguments, the body should describe how to build an instance of - the Hello package. - -2. So we have to build a package. Building something from other stuff - is called a *derivation* in Nix (as opposed to sources, which are - built by humans instead of computers). We perform a derivation by - calling `stdenv.mkDerivation`. `mkDerivation` is a function - provided by `stdenv` that builds a package from a set of - *attributes*. A set is just a list of key/value pairs where each - key is a string and each value is an arbitrary Nix - expression. They take the general form `{ name1 = expr1; ... - nameN = exprN; }`. - -3. The attribute `name` specifies the symbolic name and version of - the package. Nix doesn't really care about these things, but they - are used by for instance `nix-env -q` to show a “human-readable” - name for packages. This attribute is required by `mkDerivation`. - -4. The attribute `builder` specifies the builder. This attribute can - sometimes be omitted, in which case `mkDerivation` will fill in a - default builder (which does a `configure; make; make install`, in - essence). Hello is sufficiently simple that the default builder - would suffice, but in this case, we will show an actual builder - for educational purposes. The value `./builder.sh` refers to the - shell script shown in the [next section](build-script.md), - discussed below. - -5. The builder has to know what the sources of the package are. Here, - the attribute `src` is bound to the result of a call to the - `fetchurl` function. Given a URL and a SHA-256 hash of the expected - contents of the file at that URL, this function builds a derivation - that downloads the file and checks its hash. So the sources are a - dependency that like all other dependencies is built before Hello - itself is built. - - Instead of `src` any other name could have been used, and in fact - there can be any number of sources (bound to different attributes). - However, `src` is customary, and it's also expected by the default - builder (which we don't use in this example). - -6. Since the derivation requires Perl, we have to pass the value of the - `perl` function argument to the builder. All attributes in the set - are actually passed as environment variables to the builder, so - declaring an attribute - - ```nix - perl = perl; - ``` - - will do the trick: it binds an attribute `perl` to the function - argument which also happens to be called `perl`. However, it looks a - bit silly, so there is a shorter syntax. The `inherit` keyword - causes the specified attributes to be bound to whatever variables - with the same name happen to be in scope. diff --git a/doc/manual/src/expressions/generic-builder.md b/doc/manual/src/expressions/generic-builder.md deleted file mode 100644 index cf26b5f82..000000000 --- a/doc/manual/src/expressions/generic-builder.md +++ /dev/null @@ -1,66 +0,0 @@ -# Generic Builder Syntax - -Recall that the [build script for GNU Hello](build-script.md) looked -something like this: - -```bash -PATH=$perl/bin:$PATH -tar xvfz $src -cd hello-* -./configure --prefix=$out -make -make install -``` - -The builders for almost all Unix packages look like this — set up some -environment variables, unpack the sources, configure, build, and -install. 
For this reason the standard environment provides some Bash -functions that automate the build process. Here is what a builder using -the generic build facilities looks like: - -```bash -buildInputs="$perl" ① - -source $stdenv/setup ② - -genericBuild ③ -``` - -Here is what each line means: - -1. The `buildInputs` variable tells `setup` to use the indicated - packages as “inputs”. This means that if a package provides a `bin` - subdirectory, it's added to `PATH`; if it has a `include` - subdirectory, it's added to GCC's header search path; and so on. - (This is implemented in a modular way: `setup` tries to source the - file `pkg/nix-support/setup-hook` of all dependencies. These “setup - hooks” can then set up whatever environment variables they want; for - instance, the setup hook for Perl sets the `PERL5LIB` environment - variable to contain the `lib/site_perl` directories of all inputs.) - -2. The function `genericBuild` is defined in the file `$stdenv/setup`. - -3. The final step calls the shell function `genericBuild`, which - performs the steps that were done explicitly in the previous build - script. The generic builder is smart enough to figure out whether - to unpack the sources using `gzip`, `bzip2`, etc. It can be - customised in many ways; see the Nixpkgs manual for details. - -Discerning readers will note that the `buildInputs` could just as well -have been set in the Nix expression, like this: - -```nix - buildInputs = [ perl ]; -``` - -The `perl` attribute can then be removed, and the builder becomes even -shorter: - -```bash -source $stdenv/setup -genericBuild -``` - -In fact, `mkDerivation` provides a default builder that looks exactly -like that, so it is actually possible to omit the builder for Hello -entirely. diff --git a/doc/manual/src/expressions/language-values.md b/doc/manual/src/expressions/language-values.md deleted file mode 100644 index 75ae9f2eb..000000000 --- a/doc/manual/src/expressions/language-values.md +++ /dev/null @@ -1,251 +0,0 @@ -# Values - -## Simple Values - -Nix has the following basic data types: - - - *Strings* can be written in three ways. - - The most common way is to enclose the string between double quotes, - e.g., `"foo bar"`. Strings can span multiple lines. The special - characters `"` and `\` and the character sequence `${` must be - escaped by prefixing them with a backslash (`\`). Newlines, carriage - returns and tabs can be written as `\n`, `\r` and `\t`, - respectively. - - You can include the result of an expression into a string by - enclosing it in `${...}`, a feature known as *antiquotation*. The - enclosed expression must evaluate to something that can be coerced - into a string (meaning that it must be a string, a path, or a - derivation). For instance, rather than writing - - ```nix - "--with-freetype2-library=" + freetype + "/lib" - ``` - - (where `freetype` is a derivation), you can instead write the more - natural - - ```nix - "--with-freetype2-library=${freetype}/lib" - ``` - - The latter is automatically translated to the former. 
A more - complicated example (from the Nix expression for - [Qt](http://www.trolltech.com/products/qt)): - - ```nix - configureFlags = " - -system-zlib -system-libpng -system-libjpeg - ${if openglSupport then "-dlopen-opengl - -L${mesa}/lib -I${mesa}/include - -L${libXmu}/lib -I${libXmu}/include" else ""} - ${if threadSupport then "-thread" else "-no-thread"} - "; - ``` - - Note that Nix expressions and strings can be arbitrarily nested; in - this case the outer string contains various antiquotations that - themselves contain strings (e.g., `"-thread"`), some of which in - turn contain expressions (e.g., `${mesa}`). - - The second way to write string literals is as an *indented string*, - which is enclosed between pairs of *double single-quotes*, like so: - - ```nix - '' - This is the first line. - This is the second line. - This is the third line. - '' - ``` - - This kind of string literal intelligently strips indentation from - the start of each line. To be precise, it strips from each line a - number of spaces equal to the minimal indentation of the string as a - whole (disregarding the indentation of empty lines). For instance, - the first and second line are indented two spaces, while the third - line is indented four spaces. Thus, two spaces are stripped from - each line, so the resulting string is - - ```nix - "This is the first line.\nThis is the second line.\n This is the third line.\n" - ``` - - Note that the whitespace and newline following the opening `''` is - ignored if there is no non-whitespace text on the initial line. - - Antiquotation (`${expr}`) is supported in indented strings. - - Since `${` and `''` have special meaning in indented strings, you - need a way to quote them. `$` can be escaped by prefixing it with - `''` (that is, two single quotes), i.e., `''$`. `''` can be escaped - by prefixing it with `'`, i.e., `'''`. `$` removes any special - meaning from the following `$`. Linefeed, carriage-return and tab - characters can be written as `''\n`, `''\r`, `''\t`, and `''\` - escapes any other character. - - Indented strings are primarily useful in that they allow multi-line - string literals to follow the indentation of the enclosing Nix - expression, and that less escaping is typically necessary for - strings representing languages such as shell scripts and - configuration files because `''` is much less common than `"`. - Example: - - ```nix - stdenv.mkDerivation { - ... - postInstall = - '' - mkdir $out/bin $out/etc - cp foo $out/bin - echo "Hello World" > $out/etc/foo.conf - ${if enableBar then "cp bar $out/bin" else ""} - ''; - ... - } - ``` - - Finally, as a convenience, *URIs* as defined in appendix B of - [RFC 2396](http://www.ietf.org/rfc/rfc2396.txt) can be written *as - is*, without quotes. For instance, the string - `"http://example.org/foo.tar.bz2"` can also be written as - `http://example.org/foo.tar.bz2`. - - - Numbers, which can be *integers* (like `123`) or *floating point* - (like `123.43` or `.27e13`). - - Numbers are type-compatible: pure integer operations will always - return integers, whereas any operation involving at least one - floating point number will have a floating point number as a result. - - - *Paths*, e.g., `/bin/sh` or `./builder.sh`. A path must contain at - least one slash to be recognised as such. For instance, `builder.sh` - is not a path: it's parsed as an expression that selects the - attribute `sh` from the variable `builder`. 
If the file name is - relative, i.e., if it does not begin with a slash, it is made - absolute at parse time relative to the directory of the Nix - expression that contained it. For instance, if a Nix expression in - `/foo/bar/bla.nix` refers to `../xyzzy/fnord.nix`, the absolute path - is `/foo/xyzzy/fnord.nix`. - - If the first component of a path is a `~`, it is interpreted as if - the rest of the path were relative to the user's home directory. - e.g. `~/foo` would be equivalent to `/home/edolstra/foo` for a user - whose home directory is `/home/edolstra`. - - Paths can also be specified between angle brackets, e.g. - `<nixpkgs>`. This means that the directories listed in the - environment variable `NIX_PATH` will be searched for the given file - or directory name. - - Antiquotation is supported in any paths except those in angle brackets. - `./${foo}-${bar}.nix` is a more convenient way of writing - `./. + "/" + foo + "-" + bar + ".nix"` or `./. + "/${foo}-${bar}.nix"`. At - least one slash must appear *before* any antiquotations for this to be - recognized as a path. `a.${foo}/b.${bar}` is a syntactically valid division - operation. `./a.${foo}/b.${bar}` is a path. - - - *Booleans* with values `true` and `false`. - - - The null value, denoted as `null`. - -## Lists - -Lists are formed by enclosing a whitespace-separated list of values -between square brackets. For example, - -```nix -[ 123 ./foo.nix "abc" (f { x = y; }) ] -``` - -defines a list of four elements, the last being the result of a call to -the function `f`. Note that function calls have to be enclosed in -parentheses. If they had been omitted, e.g., - -```nix -[ 123 ./foo.nix "abc" f { x = y; } ] -``` - -the result would be a list of five elements, the fourth one being a -function and the fifth being a set. - -Note that lists are only lazy in values, and they are strict in length. - -## Sets - -Sets are really the core of the language, since ultimately the Nix -language is all about creating derivations, which are really just sets -of attributes to be passed to build scripts. - -Sets are just a list of name/value pairs (called *attributes*) enclosed -in curly brackets, where each value is an arbitrary expression -terminated by a semicolon. For example: - -```nix -{ x = 123; - text = "Hello"; - y = f { bla = 456; }; -} -``` - -This defines a set with attributes named `x`, `text`, `y`. The order of -the attributes is irrelevant. An attribute name may only occur once. - -Attributes can be selected from a set using the `.` operator. For -instance, - -```nix -{ a = "Foo"; b = "Bar"; }.a -``` - -evaluates to `"Foo"`. It is possible to provide a default value in an -attribute selection using the `or` keyword. For example, - -```nix -{ a = "Foo"; b = "Bar"; }.c or "Xyzzy" -``` - -will evaluate to `"Xyzzy"` because there is no `c` attribute in the set. - -You can use arbitrary double-quoted strings as attribute names: - -```nix -{ "foo ${bar}" = 123; "nix-1.0" = 456; }."foo ${bar}" -``` - -This will evaluate to `123` (Assuming `bar` is antiquotable). In the -case where an attribute name is just a single antiquotation, the quotes -can be dropped: - -```nix -{ foo = 123; }.${bar} or 456 -``` - -This will evaluate to `123` if `bar` evaluates to `"foo"` when coerced -to a string and `456` otherwise (again assuming `bar` is antiquotable). 
- -In the special case where an attribute name inside of a set declaration -evaluates to `null` (which is normally an error, as `null` is not -antiquotable), that attribute is simply not added to the set: - -```nix -{ ${if foo then "bar" else null} = true; } -``` - -This will evaluate to `{}` if `foo` evaluates to `false`. - -A set that has a `__functor` attribute whose value is callable (i.e. is -itself a function or a set with a `__functor` attribute whose value is -callable) can be applied as if it were a function, with the set itself -passed in first , e.g., - -```nix -let add = { __functor = self: x: x + self.x; }; - inc = add // { x = 1; }; -in inc 1 -``` - -evaluates to `2`. This can be used to attach metadata to a function -without the caller needing to treat it specially, or to implement a form -of object-oriented programming, for example. diff --git a/doc/manual/src/expressions/simple-building-testing.md b/doc/manual/src/expressions/simple-building-testing.md deleted file mode 100644 index 7f0d8f841..000000000 --- a/doc/manual/src/expressions/simple-building-testing.md +++ /dev/null @@ -1,61 +0,0 @@ -# Building and Testing - -You can now try to build Hello. Of course, you could do `nix-env -f . -iA -hello`, but you may not want to install a possibly broken package just -yet. The best way to test the package is by using the command -`nix-build`, which builds a Nix expression and creates a symlink named -`result` in the current directory: - -```console -$ nix-build -A hello -building path `/nix/store/632d2b22514d...-hello-2.1.1' -hello-2.1.1/ -hello-2.1.1/intl/ -hello-2.1.1/intl/ChangeLog -... - -$ ls -l result -lrwxrwxrwx ... 2006-09-29 10:43 result -> /nix/store/632d2b22514d...-hello-2.1.1 - -$ ./result/bin/hello -Hello, world! -``` - -The `-A` option selects the `hello` attribute. This is faster than -using the symbolic package name specified by the `name` attribute -(which also happens to be `hello`) and is unambiguous (there can be -multiple packages with the symbolic name `hello`, but there can be -only one attribute in a set named `hello`). - -`nix-build` registers the `./result` symlink as a garbage collection -root, so unless and until you delete the `./result` symlink, the output -of the build will be safely kept on your system. You can use -`nix-build`’s `-o` switch to give the symlink another name. - -Nix has transactional semantics. Once a build finishes successfully, Nix -makes a note of this in its database: it registers that the path denoted -by `out` is now “valid”. If you try to build the derivation again, Nix -will see that the path is already valid and finish immediately. If a -build fails, either because it returns a non-zero exit code, because Nix -or the builder are killed, or because the machine crashes, then the -output paths will not be registered as valid. If you try to build the -derivation again, Nix will remove the output paths if they exist (e.g., -because the builder died half-way through `make -install`) and try again. Note that there is no “negative caching”: Nix -doesn't remember that a build failed, and so a failed build can always -be repeated. This is because Nix cannot distinguish between permanent -failures (e.g., a compiler error due to a syntax error in the source) -and transient failures (e.g., a disk full condition). - -Nix also performs locking. 
If you run multiple Nix builds -simultaneously, and they try to build the same derivation, the first Nix -instance that gets there will perform the build, while the others block -(or perform other derivations if available) until the build finishes: - -```console -$ nix-build -A hello -waiting for lock on `/nix/store/0h5b7hp8d4hqfrw8igvx97x1xawrjnac-hello-2.1.1x' -``` - -So it is always safe to run multiple instances of Nix in parallel (which -isn’t the case with, say, `make`). diff --git a/doc/manual/src/expressions/simple-expression.md b/doc/manual/src/expressions/simple-expression.md deleted file mode 100644 index 857f71b9b..000000000 --- a/doc/manual/src/expressions/simple-expression.md +++ /dev/null @@ -1,23 +0,0 @@ -# A Simple Nix Expression - -This section shows how to add and test the [GNU Hello -package](http://www.gnu.org/software/hello/hello.html) to the Nix -Packages collection. Hello is a program that prints out the text “Hello, -world\!”. - -To add a package to the Nix Packages collection, you generally need to -do three things: - -1. Write a Nix expression for the package. This is a file that - describes all the inputs involved in building the package, such as - dependencies, sources, and so on. - -2. Write a *builder*. This is a shell script that builds the package - from the inputs. (In fact, it can be written in any language, but - typically it's a `bash` shell script.) - -3. Add the package to the file `pkgs/top-level/all-packages.nix`. The - Nix expression written in the first step is a *function*; it - requires other packages in order to build it. In this step you put - it all together, i.e., you call the function with the right - arguments to build the actual package. diff --git a/doc/manual/src/expressions/writing-nix-expressions.md b/doc/manual/src/expressions/writing-nix-expressions.md deleted file mode 100644 index 5664108e7..000000000 --- a/doc/manual/src/expressions/writing-nix-expressions.md +++ /dev/null @@ -1,12 +0,0 @@ -This chapter shows you how to write Nix expressions, which instruct Nix -how to build packages. It starts with a simple example (a Nix expression -for GNU Hello), and then moves on to a more in-depth look at the Nix -expression language. - -> **Note** -> -> This chapter is mostly about the Nix expression language. For more -> extensive information on adding packages to the Nix Packages -> collection (such as functions in the standard environment and coding -> conventions), please consult [its -> manual](http://nixos.org/nixpkgs/manual/). diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index 71ff13275..70a0eb994 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -1,48 +1,100 @@ # Glossary - - derivation\ + - [derivation]{#gloss-derivation}\ A description of a build action. The result of a derivation is a store object. Derivations are typically specified in Nix expressions - using the [`derivation` primitive](expressions/derivations.md). These are + using the [`derivation` primitive](language/derivations.md). These are translated into low-level *store derivations* (implicitly by `nix-env` and `nix-build`, or explicitly by `nix-instantiate`). - - store\ + - [content-addressed derivation]{#gloss-content-addressed-derivation}\ + A derivation which has the + [`__contentAddressed`](language/advanced-attributes.md#adv-attr-__contentAddressed) + attribute set to `true`. 
+ + - [fixed-output derivation]{#gloss-fixed-output-derivation}\ + A derivation which includes the + [`outputHash`](language/advanced-attributes.md#adv-attr-outputHash) attribute. + + - [store]{#gloss-store}\ The location in the file system where store objects live. Typically `/nix/store`. - - store path\ + From the perspective of the location where Nix is + invoked, the Nix store can be referred to + as a "_local_" or a "_remote_" one: + + + A *local store* exists on the filesystem of + the machine where Nix is invoked. You can use other + local stores by passing the `--store` flag to the + `nix` command. Local stores can be used for building derivations. + + + A *remote store* exists anywhere other than the + local filesystem. One example is the `/nix/store` + directory on another machine, accessed via `ssh` or + served by the `nix-serve` Perl script. + + - [chroot store]{#gloss-chroot-store}\ + A local store whose canonical path is anything other than `/nix/store`. + + - [binary cache]{#gloss-binary-cache}\ + A *binary cache* is a Nix store which uses a different format: its + metadata and signatures are kept in `.narinfo` files rather than in a + Nix database. This different format simplifies serving store objects + over the network, but cannot host builds. Examples of binary caches + include S3 buckets and the [NixOS binary + cache](https://cache.nixos.org). + + - [store path]{#gloss-store-path}\ The location in the file system of a store object, i.e., an immediate child of the Nix store directory. - - store object\ + - [store object]{#gloss-store-object}\ A file that is an immediate child of the Nix store directory. These can be regular files, but also entire directory trees. Store objects can be sources (objects copied from outside of the store), derivation outputs (objects produced by running a build action), or derivations (files describing a build action). - - substitute\ + - [input-addressed store object]{#gloss-input-addressed-store-object}\ + A store object produced by building a + non-[content-addressed](#gloss-content-addressed-derivation), + non-[fixed-output](#gloss-fixed-output-derivation) + derivation. + + - [output-addressed store object]{#gloss-output-addressed-store-object}\ + A store object whose store path hashes its content. This + includes derivations, the outputs of + [content-addressed derivations](#gloss-content-addressed-derivation), + and the outputs of + [fixed-output derivations](#gloss-fixed-output-derivation). + + - [substitute]{#gloss-substitute}\ A substitute is a command invocation stored in the Nix database that describes how to build a store object, bypassing the normal build mechanism (i.e., derivations). Typically, the substitute builds the store object by downloading a pre-built version of the store object from some server. - - purity\ + - [substituter]{#gloss-substituter}\ + A *substituter* is an additional store from which Nix will + copy store objects it doesn't have. For details, see the + [`substituters` option](command-ref/conf-file.html#conf-substituters). + + - [purity]{#gloss-purity}\ The assumption that equal Nix derivations when run always produce the same output. This cannot be guaranteed in general (e.g., a builder can rely on external inputs such as the network or the system time) but the Nix model assumes it. - - Nix expression\ + - [Nix expression]{#gloss-nix-expression}\ A high-level description of software packages and compositions thereof. Deploying software using Nix entails writing Nix expressions for your packages. 
Nix expressions are translated to derivations that are stored in the Nix store. These derivations can then be built. - - reference\ + - [reference]{#gloss-reference}\ A store path `P` is said to have a reference to a store path `Q` if the store object at `P` contains the path `Q` somewhere. The *references* of a store path are the set of store paths to which it @@ -52,11 +104,11 @@ output paths), whereas an output path only references other output paths. - - reachable\ + - [reachable]{#gloss-reachable}\ A store path `Q` is reachable from another store path `P` if `Q` is in the *closure* of the *references* relation. - - closure\ + - [closure]{#gloss-closure}\ The closure of a store path is the set of store paths that are directly or indirectly “reachable” from that store path; that is, it’s the closure of the path under the *references* relation. For @@ -71,34 +123,34 @@ to path `Q`, then `Q` is in the closure of `P`. Further, if `Q` references `R` then `R` is also in the closure of `P`. - - output path\ + - [output path]{#gloss-output-path}\ A store path produced by a derivation. - - deriver\ + - [deriver]{#gloss-deriver}\ The deriver of an *output path* is the store derivation that built it. - - validity\ + - [validity]{#gloss-validity}\ A store path is considered *valid* if it exists in the file system, is listed in the Nix database as being valid, and if all paths in its closure are also valid. - - user environment\ + - [user environment]{#gloss-user-env}\ An automatically generated store object that consists of a set of symlinks to “active” applications, i.e., other store paths. These are generated automatically by [`nix-env`](command-ref/nix-env.md). See *profiles*. - - profile\ + - [profile]{#gloss-profile}\ A symlink to the current *user environment* of a user, e.g., `/nix/var/nix/profiles/default`. - - NAR\ + - [NAR]{#gloss-nar}\ A *N*ix *AR*chive. This is a serialisation of a path in the Nix store. It can contain regular files, directories and symbolic links. NARs are generated and unpacked using `nix-store --dump` and `nix-store --restore`. - - `∅` \ + - [`∅`]{#gloss-emtpy-set}\ The empty set symbol. In the context of profile history, this denotes a package is not present in a particular version of the profile. - - `ε` \ + - [`ε`]{#gloss-epsilon}\ The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute. diff --git a/doc/manual/src/installation/installing-binary.md b/doc/manual/src/installation/installing-binary.md index 4367654a2..2d007ca1b 100644 --- a/doc/manual/src/installation/installing-binary.md +++ b/doc/manual/src/installation/installing-binary.md @@ -13,7 +13,7 @@ for your platform: - multi-user on macOS > **Notes on read-only filesystem root in macOS 10.15 Catalina +** - > + > > - It took some time to support this cleanly. You may see posts, > examples, and tutorials using obsolete workarounds. > - Supporting it cleanly made macOS installs too complex to qualify @@ -31,8 +31,8 @@ $ sh <(curl -L https://nixos.org/nix/install) --no-daemon ``` This will perform a single-user installation of Nix, meaning that `/nix` -is owned by the invoking user. You should run this under your usual user -account, *not* as root. The script will invoke `sudo` to create `/nix` +is owned by the invoking user. You can run this under your usual user +account or root. The script will invoke `sudo` to create `/nix` if it doesn’t already exist. 
If you don’t have `sudo`, you should manually create `/nix` first as root, e.g.: @@ -71,11 +71,11 @@ $ sh <(curl -L https://nixos.org/nix/install) --daemon The multi-user installation of Nix will create build users between the user IDs 30001 and 30032, and a group with the group ID 30000. You -should run this under your usual user account, *not* as root. The script +can run this under your usual user account or root. The script will invoke `sudo` as needed. > **Note** -> +> > If you need Nix to use a different group ID or user ID set, you will > have to download the tarball manually and [edit the install > script](#installing-from-a-binary-tarball). @@ -84,7 +84,9 @@ The installer will modify `/etc/bashrc`, and `/etc/zshrc` if they exist. The installer will first back up these files with a `.backup-before-nix` extension. The installer will also create `/etc/profile.d/nix.sh`. -You can uninstall Nix with the following commands: +## Uninstalling + +### Linux ```console sudo rm -rf /etc/profile/nix.sh /etc/nix /nix ~root/.nix-profile ~root/.nix-defexpr ~root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels @@ -95,21 +97,114 @@ sudo systemctl stop nix-daemon.service sudo systemctl disable nix-daemon.socket sudo systemctl disable nix-daemon.service sudo systemctl daemon-reload - -# If you are on macOS, you will need to run: -sudo launchctl unload /Library/LaunchDaemons/org.nixos.nix-daemon.plist -sudo rm /Library/LaunchDaemons/org.nixos.nix-daemon.plist ``` There may also be references to Nix in `/etc/profile`, `/etc/bashrc`, and `/etc/zshrc` which you may remove. -# macOS Installation <a name="sect-macos-installation-change-store-prefix"></a><a name="sect-macos-installation-encrypted-volume"></a><a name="sect-macos-installation-symlink"></a><a name="sect-macos-installation-recommended-notes"></a> +### macOS + +1. Edit `/etc/zshrc` and `/etc/bashrc` to remove the lines sourcing + `nix-daemon.sh`, which should look like this: + + ```bash + # Nix + if [ -e '/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh' ]; then + . '/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh' + fi + # End Nix + ``` + + If these files haven't been altered since installing Nix you can simply put + the backups back in place: + + ```console + sudo mv /etc/zshrc.backup-before-nix /etc/zshrc + sudo mv /etc/bashrc.backup-before-nix /etc/bashrc + ``` + + This will stop shells from sourcing the file and bringing everything you + installed using Nix in scope. + +2. Stop and remove the Nix daemon services: + + ```console + sudo launchctl unload /Library/LaunchDaemons/org.nixos.nix-daemon.plist + sudo rm /Library/LaunchDaemons/org.nixos.nix-daemon.plist + sudo launchctl unload /Library/LaunchDaemons/org.nixos.darwin-store.plist + sudo rm /Library/LaunchDaemons/org.nixos.darwin-store.plist + ``` + + This stops the Nix daemon and prevents it from being started next time you + boot the system. + +3. Remove the `nixbld` group and the `_nixbuildN` users: + + ```console + sudo dscl . -delete /Groups/nixbld + for u in $(sudo dscl . -list /Users | grep _nixbld); do sudo dscl . -delete /Users/$u; done + ``` + + This will remove all the build users that no longer serve a purpose. + +4. Edit fstab using `sudo vifs` to remove the line mounting the Nix Store + volume on `/nix`, which looks like + `UUID=<uuid> /nix apfs rw,noauto,nobrowse,suid,owners` or + `LABEL=Nix\040Store /nix apfs rw,nobrowse`. This will prevent automatic + mounting of the Nix Store volume. + +5. 
Edit `/etc/synthetic.conf` to remove the `nix` line. If this is the only + line in the file you can remove it entirely, `sudo rm /etc/synthetic.conf`. + This will prevent the creation of the empty `/nix` directory to provide a + mountpoint for the Nix Store volume. + +6. Remove the files Nix added to your system: + + ```console + sudo rm -rf /etc/nix /var/root/.nix-profile /var/root/.nix-defexpr /var/root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels + ``` + + This gets rid of any data Nix may have created except for the store which is + removed next. + +7. Remove the Nix Store volume: + + ```console + sudo diskutil apfs deleteVolume /nix + ``` + + This will remove the Nix Store volume and everything that was added to the + store. + + If the output indicates that the command couldn't remove the volume, you should + make sure you don't have an _unmounted_ Nix Store volume. Look for a + "Nix Store" volume in the output of the following command: + + ```console + diskutil list + ``` + + If you _do_ see a "Nix Store" volume, delete it by re-running the diskutil + deleteVolume command, but replace `/nix` with the store volume's `diskXsY` + identifier. + +> **Note** +> +> After you complete the steps here, you will still have an empty `/nix` +> directory. This is an expected sign of a successful uninstall. The empty +> `/nix` directory will disappear the next time you reboot. +> +> You do not have to reboot to finish uninstalling Nix. The uninstall is +> complete. macOS (Catalina+) directly controls root directories and its +> read-only root will prevent you from manually deleting the empty `/nix` +> mountpoint. + +# macOS Installation +[]{#sect-macos-installation-change-store-prefix}[]{#sect-macos-installation-encrypted-volume}[]{#sect-macos-installation-symlink}[]{#sect-macos-installation-recommended-notes} <!-- Note: anchors above to catch permalinks to old explanations --> We believe we have ironed out how to cleanly support the read-only root -on modern macOS. New installs will do this automatically, and you can -also re-run a new installer to convert your existing setup. +on modern macOS. New installs will do this automatically. This section previously detailed the situation, options, and trade-offs, but it now only outlines what the installer does. You don't need to know diff --git a/doc/manual/src/expressions/advanced-attributes.md b/doc/manual/src/language/advanced-attributes.md index 000595815..2e7e80ed0 100644 --- a/doc/manual/src/expressions/advanced-attributes.md +++ b/doc/manual/src/language/advanced-attributes.md @@ -2,7 +2,7 @@ Derivations can declare some infrequently used optional attributes. - - `allowedReferences`\ + - [`allowedReferences`]{#adv-attr-allowedReferences}\ The optional attribute `allowedReferences` specifies a list of legal references (dependencies) of the output of the builder. For example, @@ -17,7 +17,7 @@ Derivations can declare some infrequently used optional attributes. booting Linux don’t have accidental dependencies on other paths in the Nix store. - - `allowedRequisites`\ + - [`allowedRequisites`]{#adv-attr-allowedRequisites}\ This attribute is similar to `allowedReferences`, but it specifies the legal requisites of the whole closure, so all the dependencies recursively. For example, @@ -30,7 +30,7 @@ Derivations can declare some infrequently used optional attributes. runtime dependency than `foobar`, and in addition it enforces that `foobar` itself doesn't introduce any other dependency itself. 
- - `disallowedReferences`\ + - [`disallowedReferences`]{#adv-attr-disallowedReferences}\ The optional attribute `disallowedReferences` specifies a list of illegal references (dependencies) of the output of the builder. For example, @@ -42,7 +42,7 @@ Derivations can declare some infrequently used optional attributes. enforces that the output of a derivation cannot have a direct runtime dependencies on the derivation `foo`. - - `disallowedRequisites`\ + - [`disallowedRequisites`]{#adv-attr-disallowedRequisites}\ This attribute is similar to `disallowedReferences`, but it specifies illegal requisites for the whole closure, so all the dependencies recursively. For example, @@ -55,7 +55,7 @@ Derivations can declare some infrequently used optional attributes. dependency on `foobar` or any other derivation depending recursively on `foobar`. - - `exportReferencesGraph`\ + - [`exportReferencesGraph`]{#adv-attr-exportReferencesGraph}\ This attribute allows builders access to the references graph of their inputs. The attribute is a list of inputs in the Nix store whose references graph the builder needs to know. The value of @@ -84,7 +84,7 @@ Derivations can declare some infrequently used optional attributes. with a Nix store containing the closure of a bootable NixOS configuration). - - `impureEnvVars`\ + - [`impureEnvVars`]{#adv-attr-impureEnvVars}\ This attribute allows you to specify a list of environment variables that should be passed from the environment of the calling user to the builder. Usually, the environment is cleared completely when the @@ -112,7 +112,7 @@ Derivations can declare some infrequently used optional attributes. > environmental variables come from the environment of the > `nix-build`. - - `outputHash`; `outputHashAlgo`; `outputHashMode`\ + - [`outputHash`]{#adv-attr-outputHash}; [`outputHashAlgo`]{#adv-attr-outputHashAlgo}; [`outputHashMode`]{#adv-attr-outputHashMode}\ These attributes declare that the derivation is a so-called *fixed-output derivation*, which means that a cryptographic hash of the output is already known in advance. When the build of a @@ -208,7 +208,7 @@ Derivations can declare some infrequently used optional attributes. [`nix-hash` command](../command-ref/nix-hash.md) for information about converting to and from base-32 notation.) - - `__contentAddressed` + - [`__contentAddressed`]{#adv-attr-__contentAddressed} If this **experimental** attribute is set to true, then the derivation outputs will be stored in a content-addressed location rather than the traditional input-addressed one. @@ -216,7 +216,7 @@ Derivations can declare some infrequently used optional attributes. Setting this attribute also requires setting `outputHashMode` and `outputHashAlgo` like for *fixed-output derivations* (see above). - - `passAsFile`\ + - [`passAsFile`]{#adv-attr-passAsFile}\ A list of names of attributes that should be passed via files rather than environment variables. For example, if you have @@ -234,7 +234,7 @@ Derivations can declare some infrequently used optional attributes. builder, since most operating systems impose a limit on the size of the environment (typically, a few hundred kilobyte). - - `preferLocalBuild`\ + - [`preferLocalBuild`]{#adv-attr-preferLocalBuild}\ If this attribute is set to `true` and [distributed building is enabled](../advanced-topics/distributed-builds.md), then, if possible, the derivation will be built locally instead of forwarded @@ -242,7 +242,7 @@ Derivations can declare some infrequently used optional attributes. 
where the cost of doing a download or remote build would exceed the cost of building locally. - - `allowSubstitutes`\ + - [`allowSubstitutes`]{#adv-attr-allowSubstitutes}\ If this attribute is set to `false`, then Nix will always build this derivation; it will not try to substitute its outputs. This is useful for very trivial derivations (such as `writeText` in Nixpkgs) diff --git a/doc/manual/src/expressions/builtin-constants.md b/doc/manual/src/language/builtin-constants.md index 1404289e5..78d066a82 100644 --- a/doc/manual/src/expressions/builtin-constants.md +++ b/doc/manual/src/language/builtin-constants.md @@ -14,7 +14,7 @@ Here are the constants built into the Nix expression evaluator: This allows a Nix expression to fall back gracefully on older Nix installations that don’t have the desired built-in function. - - `builtins.currentSystem`\ + - [`builtins.currentSystem`]{#builtins-currentSystem}\ The built-in value `currentSystem` evaluates to the Nix platform identifier for the Nix installation on which the expression is being evaluated, such as `"i686-linux"` or `"x86_64-darwin"`. diff --git a/doc/manual/src/expressions/builtins-prefix.md b/doc/manual/src/language/builtins-prefix.md index c631a8453..c631a8453 100644 --- a/doc/manual/src/expressions/builtins-prefix.md +++ b/doc/manual/src/language/builtins-prefix.md diff --git a/doc/manual/src/expressions/builtins-suffix.md b/doc/manual/src/language/builtins-suffix.md index a74db2857..a74db2857 100644 --- a/doc/manual/src/expressions/builtins-suffix.md +++ b/doc/manual/src/language/builtins-suffix.md diff --git a/doc/manual/src/expressions/language-constructs.md b/doc/manual/src/language/constructs.md index 1c01f2cc7..1c01f2cc7 100644 --- a/doc/manual/src/expressions/language-constructs.md +++ b/doc/manual/src/language/constructs.md diff --git a/doc/manual/src/expressions/derivations.md b/doc/manual/src/language/derivations.md index d26a33b7f..3391ec0d8 100644 --- a/doc/manual/src/expressions/derivations.md +++ b/doc/manual/src/language/derivations.md @@ -4,7 +4,7 @@ The most important built-in function is `derivation`, which is used to describe a single derivation (a build action). It takes as input a set, the attributes of which specify the inputs of the build. - - There must be an attribute named `system` whose value must be a + - There must be an attribute named [`system`]{#attr-system} whose value must be a string specifying a Nix system type, such as `"i686-linux"` or `"x86_64-darwin"`. (To figure out your system type, run `nix -vv --version`.) The build can only be performed on a machine and diff --git a/doc/manual/src/language/index.md b/doc/manual/src/language/index.md new file mode 100644 index 000000000..a4b402f8b --- /dev/null +++ b/doc/manual/src/language/index.md @@ -0,0 +1,33 @@ +# Nix Language + +The Nix language is + +- *domain-specific* + + It only exists for the Nix package manager: + to describe packages and configurations as well as their variants and compositions. + It is not intended for general purpose use. + +- *declarative* + + There is no notion of executing sequential steps. + Dependencies between operations are established only through data. + +- *pure* + + Values cannot change during computation. + Functions always produce the same output if their input does not change. + +- *functional* + + Functions are like any other value. + Functions can be assigned to names, taken as arguments, or returned by functions. + +- *lazy* + + Expressions are only evaluated when their value is needed. 
+ +- *dynamically typed* + + Type errors are only detected when expressions are evaluated. + diff --git a/doc/manual/src/expressions/language-operators.md b/doc/manual/src/language/operators.md index 268b44f4c..32398189d 100644 --- a/doc/manual/src/expressions/language-operators.md +++ b/doc/manual/src/language/operators.md @@ -1,6 +1,6 @@ # Operators -The table below lists the operators in the Nix expression language, in +The table below lists the operators in the Nix language, in order of precedence (from strongest to weakest binding). | Name | Syntax | Associativity | Description | Precedence | diff --git a/doc/manual/src/language/values.md b/doc/manual/src/language/values.md new file mode 100644 index 000000000..6fc8c0369 --- /dev/null +++ b/doc/manual/src/language/values.md @@ -0,0 +1,275 @@ +# Data Types + +## Primitives + +- <a id="type-string" href="#type-string">String</a> + + *Strings* can be written in three ways. + + The most common way is to enclose the string between double quotes, + e.g., `"foo bar"`. Strings can span multiple lines. The special + characters `"` and `\` and the character sequence `${` must be + escaped by prefixing them with a backslash (`\`). Newlines, carriage + returns and tabs can be written as `\n`, `\r` and `\t`, + respectively. + + You can include the result of an expression into a string by + enclosing it in `${...}`, a feature known as *antiquotation*. The + enclosed expression must evaluate to something that can be coerced + into a string (meaning that it must be a string, a path, or a + derivation). For instance, rather than writing + + ```nix + "--with-freetype2-library=" + freetype + "/lib" + ``` + + (where `freetype` is a derivation), you can instead write the more + natural + + ```nix + "--with-freetype2-library=${freetype}/lib" + ``` + + The latter is automatically translated to the former. A more + complicated example (from the Nix expression for + [Qt](http://www.trolltech.com/products/qt)): + + ```nix + configureFlags = " + -system-zlib -system-libpng -system-libjpeg + ${if openglSupport then "-dlopen-opengl + -L${mesa}/lib -I${mesa}/include + -L${libXmu}/lib -I${libXmu}/include" else ""} + ${if threadSupport then "-thread" else "-no-thread"} + "; + ``` + + Note that Nix expressions and strings can be arbitrarily nested; in + this case the outer string contains various antiquotations that + themselves contain strings (e.g., `"-thread"`), some of which in + turn contain expressions (e.g., `${mesa}`). + + The second way to write string literals is as an *indented string*, + which is enclosed between pairs of *double single-quotes*, like so: + + ```nix + '' + This is the first line. + This is the second line. + This is the third line. + '' + ``` + + This kind of string literal intelligently strips indentation from + the start of each line. To be precise, it strips from each line a + number of spaces equal to the minimal indentation of the string as a + whole (disregarding the indentation of empty lines). For instance, + the first and second line are indented two spaces, while the third + line is indented four spaces. Thus, two spaces are stripped from + each line, so the resulting string is + + ```nix + "This is the first line.\nThis is the second line.\n This is the third line.\n" + ``` + + Note that the whitespace and newline following the opening `''` is + ignored if there is no non-whitespace text on the initial line. + + Antiquotation (`${expr}`) is supported in indented strings. 
+ + Since `${` and `''` have special meaning in indented strings, you + need a way to quote them. `$` can be escaped by prefixing it with + `''` (that is, two single quotes), i.e., `''$`. `''` can be escaped + by prefixing it with `'`, i.e., `'''`. `$` removes any special + meaning from the following `$`. Linefeed, carriage-return and tab + characters can be written as `''\n`, `''\r`, `''\t`, and `''\` + escapes any other character. + + Indented strings are primarily useful in that they allow multi-line + string literals to follow the indentation of the enclosing Nix + expression, and that less escaping is typically necessary for + strings representing languages such as shell scripts and + configuration files because `''` is much less common than `"`. + Example: + + ```nix + stdenv.mkDerivation { + ... + postInstall = + '' + mkdir $out/bin $out/etc + cp foo $out/bin + echo "Hello World" > $out/etc/foo.conf + ${if enableBar then "cp bar $out/bin" else ""} + ''; + ... + } + ``` + + Finally, as a convenience, *URIs* as defined in appendix B of + [RFC 2396](http://www.ietf.org/rfc/rfc2396.txt) can be written *as + is*, without quotes. For instance, the string + `"http://example.org/foo.tar.bz2"` can also be written as + `http://example.org/foo.tar.bz2`. + +- <a id="type-number" href="#type-number">Number</a> + + Numbers, which can be *integers* (like `123`) or *floating point* + (like `123.43` or `.27e13`). + + Numbers are type-compatible: pure integer operations will always + return integers, whereas any operation involving at least one + floating point number will have a floating point number as a result. + +- <a id="type-path" href="#type-path">Path</a> + + *Paths*, e.g., `/bin/sh` or `./builder.sh`. A path must contain at + least one slash to be recognised as such. For instance, `builder.sh` + is not a path: it's parsed as an expression that selects the + attribute `sh` from the variable `builder`. If the file name is + relative, i.e., if it does not begin with a slash, it is made + absolute at parse time relative to the directory of the Nix + expression that contained it. For instance, if a Nix expression in + `/foo/bar/bla.nix` refers to `../xyzzy/fnord.nix`, the absolute path + is `/foo/xyzzy/fnord.nix`. + + If the first component of a path is a `~`, it is interpreted as if + the rest of the path were relative to the user's home directory. + e.g. `~/foo` would be equivalent to `/home/edolstra/foo` for a user + whose home directory is `/home/edolstra`. + + Paths can also be specified between angle brackets, e.g. + `<nixpkgs>`. This means that the directories listed in the + environment variable `NIX_PATH` will be searched for the given file + or directory name. + + Antiquotation is supported in any paths except those in angle brackets. + `./${foo}-${bar}.nix` is a more convenient way of writing + `./. + "/" + foo + "-" + bar + ".nix"` or `./. + "/${foo}-${bar}.nix"`. At + least one slash must appear *before* any antiquotations for this to be + recognized as a path. `a.${foo}/b.${bar}` is a syntactically valid division + operation. `./a.${foo}/b.${bar}` is a path. + + When a path appears in an antiquotation, and is thus coerced into a string, + the path is first copied into the Nix store and the resulting string is + the Nix store path. For instance `"${./foo.txt}"` will cause `foo.txt` in + the current directory to be copied into the Nix store and result in the + string `"/nix/store/<HASH>-foo.txt"`.
+ + Note that the Nix language assumes that all input files will remain + _unchanged_ during the course of the Nix expression evaluation. + If you for example antiquote a file path during a `nix repl` session, and + then later in the same session, after having changed the file contents, + evaluate the antiquotation with the file path again, then Nix will still + return the first store path. It will _not_ reread the file contents to + produce a different Nix store path. + +- <a id="type-boolean" href="#type-boolean">Boolean</a> + + *Booleans* with values `true` and `false`. + +- <a id="type-null" href="#type-null">Null</a> + + The null value, denoted as `null`. + +## List + +Lists are formed by enclosing a whitespace-separated list of values +between square brackets. For example, + +```nix +[ 123 ./foo.nix "abc" (f { x = y; }) ] +``` + +defines a list of four elements, the last being the result of a call to +the function `f`. Note that function calls have to be enclosed in +parentheses. If they had been omitted, e.g., + +```nix +[ 123 ./foo.nix "abc" f { x = y; } ] +``` + +the result would be a list of five elements, the fourth one being a +function and the fifth being a set. + +Note that lists are only lazy in values, and they are strict in length. + +## Attribute Set + +An attribute set is a collection of name-value-pairs (called *attributes*) enclosed in curly brackets (`{ }`). + +Names and values are separated by an equal sign (`=`). +Each value is an arbitrary expression terminated by a semicolon (`;`). + +Attributes can appear in any order. +An attribute name may only occur once. + +Example: + +```nix +{ + x = 123; + text = "Hello"; + y = f { bla = 456; }; +} +``` + +This defines a set with attributes named `x`, `text`, `y`. + +Attributes can be selected from a set using the `.` operator. For +instance, + +```nix +{ a = "Foo"; b = "Bar"; }.a +``` + +evaluates to `"Foo"`. It is possible to provide a default value in an +attribute selection using the `or` keyword. For example, + +```nix +{ a = "Foo"; b = "Bar"; }.c or "Xyzzy" +``` + +will evaluate to `"Xyzzy"` because there is no `c` attribute in the set. + +You can use arbitrary double-quoted strings as attribute names: + +```nix +{ "foo ${bar}" = 123; "nix-1.0" = 456; }."foo ${bar}" +``` + +This will evaluate to `123` (Assuming `bar` is antiquotable). In the +case where an attribute name is just a single antiquotation, the quotes +can be dropped: + +```nix +{ foo = 123; }.${bar} or 456 +``` + +This will evaluate to `123` if `bar` evaluates to `"foo"` when coerced +to a string and `456` otherwise (again assuming `bar` is antiquotable). + +In the special case where an attribute name inside of a set declaration +evaluates to `null` (which is normally an error, as `null` is not +antiquotable), that attribute is simply not added to the set: + +```nix +{ ${if foo then "bar" else null} = true; } +``` + +This will evaluate to `{}` if `foo` evaluates to `false`. + +A set that has a `__functor` attribute whose value is callable (i.e. is +itself a function or a set with a `__functor` attribute whose value is +callable) can be applied as if it were a function, with the set itself +passed in first , e.g., + +```nix +let add = { __functor = self: x: x + self.x; }; + inc = add // { x = 1; }; +in inc 1 +``` + +evaluates to `2`. This can be used to attach metadata to a function +without the caller needing to treat it specially, or to implement a form +of object-oriented programming, for example. 
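The `__functor` mechanism described above is also a convenient way to attach metadata to a function. The following is a minimal illustrative sketch; the names `greet` and `description` are placeholders, not taken from the manual text above:

```nix
# Minimal sketch: a callable attribute set that also carries metadata.
# `description` is ordinary data; `__functor` makes the set applicable like a function.
let
  greet = {
    description = "prepends a friendly greeting";
    __functor = self: name: "Hello, ${name}!";
  };
in {
  result = greet "Nix";      # the set is applied as a function: "Hello, Nix!"
  doc = greet.description;   # the metadata remains accessible as an attribute
}
```

Evaluating this expression (for example with `nix-instantiate --eval --strict`) should yield both the greeting and the description; callers that only need the function can ignore the metadata entirely.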
diff --git a/doc/manual/src/package-management/package-management.md b/doc/manual/src/package-management/package-management.md index bd26a09ab..d528112e2 100644 --- a/doc/manual/src/package-management/package-management.md +++ b/doc/manual/src/package-management/package-management.md @@ -1,5 +1,4 @@ This chapter discusses how to do package management with Nix, i.e., how to obtain, install, upgrade, and erase packages. This is the “user’s” perspective of the Nix system — people who want to *create* -packages should consult the [chapter on writing Nix -expressions](../expressions/writing-nix-expressions.md). +packages should consult the chapter on the [Nix language](../language/index.md). diff --git a/doc/manual/src/release-notes/rl-2.10.md b/doc/manual/src/release-notes/rl-2.10.md new file mode 100644 index 000000000..b99dbeef0 --- /dev/null +++ b/doc/manual/src/release-notes/rl-2.10.md @@ -0,0 +1,31 @@ +# Release 2.10 (2022-07-11) + +* `nix repl` now takes installables on the command line, unifying the usage + with other commands that use `--file` and `--expr`. The primary breaking change + is for the common usage of `nix repl '<nixpkgs>'` which can be recovered with + `nix repl --file '<nixpkgs>'` or `nix repl --expr 'import <nixpkgs>{}'`. + + This is currently guarded by the `repl-flake` experimental feature. + +* A new function `builtins.traceVerbose` is available. It is similar + to `builtins.trace` if the `trace-verbose` setting is set to true, + and it is a no-op otherwise. + +* `nix search` has a new flag `--exclude` to filter out packages. + +* On Linux, if `/nix` doesn't exist and cannot be created and you're + not running as root, Nix will automatically use + `~/.local/share/nix/root` as a chroot store. This enables non-root + users to download the statically linked Nix binary and have it work + out of the box, e.g. + + ``` + # ~/nix run nixpkgs#hello + warning: '/nix' does not exists, so Nix will use '/home/ubuntu/.local/share/nix/root' as a chroot store + Hello, world! + ``` + +* `flake-registry.json` is now fetched from `channels.nixos.org`. + +* Nix can now be built with LTO by passing `--enable-lto` to `configure`. + LTO is currently only supported when building with GCC. diff --git a/doc/manual/src/release-notes/rl-2.11.md b/doc/manual/src/release-notes/rl-2.11.md new file mode 100644 index 000000000..b322a4e5e --- /dev/null +++ b/doc/manual/src/release-notes/rl-2.11.md @@ -0,0 +1,5 @@ +# Release 2.11 (2022-08-24) + +* `nix copy` now copies the store paths in parallel as much as possible (again). + This doesn't apply to the `daemon` and `ssh-ng` stores, which copy everything + in one batch to avoid latency issues. diff --git a/doc/manual/src/release-notes/rl-2.7.md b/doc/manual/src/release-notes/rl-2.7.md new file mode 100644 index 000000000..2f3879422 --- /dev/null +++ b/doc/manual/src/release-notes/rl-2.7.md @@ -0,0 +1,33 @@ +# Release 2.7 (2022-03-07) + +* Nix will now make some helpful suggestions when you mistype + something on the command line. For instance, if you type `nix build + nixpkgs#thunderbrd`, it will suggest `thunderbird`. + +* A number of "default" flake output attributes have been + renamed.
These are: + + * `defaultPackage.<system>` → `packages.<system>.default` + * `defaultApps.<system>` → `apps.<system>.default` + * `defaultTemplate` → `templates.default` + * `defaultBundler.<system>` → `bundlers.<system>.default` + * `overlay` → `overlays.default` + * `devShell.<system>` → `devShells.<system>.default` + + The old flake output attributes still work, but `nix flake check` + will warn about them. + +* Breaking API change: `nix bundle` now supports bundlers of the form + `bundler.<system>.<name>= derivation: another-derivation;`. This + supports additional functionality to inspect evaluation information + during bundling. A new + [repository](https://github.com/NixOS/bundlers) has various bundlers + implemented. + +* `nix store ping` now reports the version of the remote Nix daemon. + +* `nix flake {init,new}` now display information about which files have been + created. + +* Templates can now define a `welcomeText` attribute, which is printed out by + `nix flake {init,new} --template <template>`. diff --git a/doc/manual/src/release-notes/rl-2.8.md b/doc/manual/src/release-notes/rl-2.8.md new file mode 100644 index 000000000..9778e8c3a --- /dev/null +++ b/doc/manual/src/release-notes/rl-2.8.md @@ -0,0 +1,53 @@ +# Release 2.8 (2022-04-19) + +* New experimental command: `nix fmt`, which applies a formatter + defined by the `formatter.<system>` flake output to the Nix + expressions in a flake. + +* Various Nix commands can now read expressions from standard input + using `--file -`. + +* New experimental builtin function `builtins.fetchClosure` that + copies a closure from a binary cache at evaluation time and rewrites + it to content-addressed form (if it isn't already). Like + `builtins.storePath`, this allows importing pre-built store paths; + the difference is that it doesn't require the user to configure + binary caches and trusted public keys. + + This function is only available if you enable the experimental + feature `fetch-closure`. + +* New experimental feature: *impure derivations*. These are + derivations that can produce a different result every time they're + built. Here is an example: + + ```nix + stdenv.mkDerivation { + name = "impure"; + __impure = true; # marks this derivation as impure + buildCommand = "date > $out"; + } + ``` + + Running `nix build` twice on this expression will build the + derivation twice, producing two different content-addressed store + paths. Like fixed-output derivations, impure derivations have access + to the network. Only fixed-output derivations and impure derivations + can depend on an impure derivation. + +* `nix store make-content-addressable` has been renamed to `nix store + make-content-addressed`. + +* The `nixosModule` flake output attribute has been renamed consistent + with the `.default` renames in Nix 2.7. + + * `nixosModule` → `nixosModules.default` + + As before, the old output will continue to work, but `nix flake check` will + issue a warning about it. + +* `nix run` is now stricter in what it accepts: members of the `apps` + flake output are now required to be apps (as defined in [the + manual](https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-run.html#apps)), + and members of `packages` or `legacyPackages` must be derivations + (not apps). 
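As a rough sketch of the renamed flake outputs listed in the 2.7 notes above, a flake written against the new attribute names could look like the following; the `hello` package, `mkShell` dev shell, and the `nixos-22.05-small` branch are placeholders used only for illustration:

```nix
# Sketch of a flake using the post-2.7 output names
# (packages.<system>.default, devShells.<system>.default, overlays.default).
{
  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.05-small";

  outputs = { self, nixpkgs }:
    let pkgs = nixpkgs.legacyPackages.x86_64-linux; in
    {
      # formerly defaultPackage.x86_64-linux
      packages.x86_64-linux.default = pkgs.hello;

      # formerly devShell.x86_64-linux
      devShells.x86_64-linux.default = pkgs.mkShell {
        packages = [ pkgs.hello ];
      };

      # formerly overlay
      overlays.default = final: prev: { };
    };
}
```

With this layout, a plain `nix build` selects `packages.<system>.default`, and `nix flake check` no longer warns about legacy names such as `defaultPackage`.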
diff --git a/doc/manual/src/release-notes/rl-2.9.md b/doc/manual/src/release-notes/rl-2.9.md new file mode 100644 index 000000000..98cc4235d --- /dev/null +++ b/doc/manual/src/release-notes/rl-2.9.md @@ -0,0 +1,47 @@ +# Release 2.9 (2022-05-30) + +* Running Nix with the new `--debugger` flag will cause it to start a + repl session if an exception is thrown during evaluation, or if + `builtins.break` is called. From there you can inspect the values + of variables and evaluate Nix expressions. In debug mode, the + following new repl commands are available: + + ``` + :env Show env stack + :bt Show trace stack + :st Show current trace + :st <idx> Change to another trace in the stack + :c Go until end of program, exception, or builtins.break(). + :s Go one step + ``` + + Read more about the debugger + [here](https://www.zknotes.com/note/5970). + +* Nix now provides better integration with zsh's `run-help` + feature. It is now included in the Nix installation in the form of + an autoloadable shell function, `run-help-nix`. It picks up Nix + subcommands from the currently typed in command and directs the user + to the associated man pages. + +* `nix repl` has a new build-and-link (`:bl`) command that builds a + derivation while creating GC root symlinks. + +* The path produced by `builtins.toFile` is now allowed to be imported + or read even with restricted evaluation. Note that this will not + work with a read-only store. + +* `nix build` has a new `--print-out-paths` flag to print the + resulting output paths. This matches the default behaviour of + `nix-build`. + +* You can now specify which outputs of a derivation `nix` should + operate on using the syntax `installable^outputs`, + e.g. `nixpkgs#glibc^dev,static` or `nixpkgs#glibc^*`. By default, + `nix` will use the outputs specified by the derivation's + `meta.outputsToInstall` attribute if it exists, or all outputs + otherwise. + +* `builtins.fetchTree` (and flake inputs) can now be used to fetch + plain files over the `http(s)` and `file` protocols in addition to + directory tarballs. diff --git a/doc/manual/src/release-notes/rl-next.md b/doc/manual/src/release-notes/rl-next.md index 7dd8387d8..68f7d1a9d 100644 --- a/doc/manual/src/release-notes/rl-next.md +++ b/doc/manual/src/release-notes/rl-next.md @@ -1,28 +1,7 @@ # Release X.Y (202?-??-??) -* A number of "default" flake output attributes have been - renamed. These are: +* `<nix/fetchurl.nix>` now accepts an additional argument `impure` which + defaults to `false`. If it is set to `true`, the `hash` and `sha256` + arguments will be ignored and the resulting derivation will have + `__impure` set to `true`, making it an impure derivation. - * `defaultPackage.<system>` → `packages.<system>.default` - * `defaultApps.<system>` → `apps.<system>.default` - * `defaultTemplate` → `templates.default` - * `defaultBundler.<system>` → `bundlers.<system>.default` - * `overlay` → `overlays.default` - * `devShell.<system>` → `devShells.<system>.default` - - The old flake output attributes still work, but `nix flake check` - will warn about them. - -* `nix bundle` breaking API change now supports bundlers of the form - `bundler.<system>.<name>= derivation: another-derivation;`. This supports - additional functionality to inspect evaluation information during bundling. A - new [repository](https://github.com/NixOS/bundlers) has various bundlers - implemented. - -* `nix store ping` now reports the version of the remote Nix daemon. 
- -* `nix flake {init,new}` now display information about which files have been - created. - -* Templates can now define a `welcomeText` attribute, which is printed out by - `nix flake {init,new} --template <template>`. diff --git a/doc/manual/utils.nix b/doc/manual/utils.nix index d4b18472f..d0643ef46 100644 --- a/doc/manual/utils.nix +++ b/doc/manual/utils.nix @@ -5,6 +5,32 @@ rec { concatStrings = concatStringsSep ""; + replaceStringsRec = from: to: string: + # recursively replace occurrences of `from` with `to` within `string` + # example: + # replaceStringRec "--" "-" "hello-----world" + # => "hello-world" + let + replaced = replaceStrings [ from ] [ to ] string; + in + if replaced == string then string else replaceStringsRec from to replaced; + + squash = replaceStringsRec "\n\n\n" "\n\n"; + + trim = string: + # trim trailing spaces and squash non-leading spaces + let + trimLine = line: + let + # separate leading spaces from the rest + parts = split "(^ *)" line; + spaces = head (elemAt parts 1); + rest = elemAt parts 2; + # drop trailing spaces + body = head (split " *$" rest); + in spaces + replaceStringsRec " " " " body; + in concatStringsSep "\n" (map trimLine (splitLines string)); + # FIXME: O(n^2) unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) []; diff --git a/docker.nix b/docker.nix index 251bd2f46..bb2b4e7ff 100644 --- a/docker.nix +++ b/docker.nix @@ -2,8 +2,12 @@ , lib ? pkgs.lib , name ? "nix" , tag ? "latest" +, bundleNixpkgs ? true , channelName ? "nixpkgs" , channelURL ? "https://nixos.org/channels/nixpkgs-unstable" +, extraPkgs ? [] +, maxLayers ? 100 +, nixConf ? {} }: let defaultPkgs = with pkgs; [ @@ -22,13 +26,14 @@ let findutils iana-etc git - ]; + openssh + ] ++ extraPkgs; users = { root = { uid = 0; - shell = "/bin/bash"; + shell = "${pkgs.bashInteractive}/bin/bash"; home = "/root"; gid = 0; }; @@ -120,20 +125,27 @@ let (lib.attrValues (lib.mapAttrs groupToGroup groups)) ); - nixConf = { + defaultNixConf = { sandbox = "false"; build-users-group = "nixbld"; - trusted-public-keys = "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY="; + trusted-public-keys = [ "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=" ]; }; - nixConfContents = (lib.concatStringsSep "\n" (lib.mapAttrsFlatten (n: v: "${n} = ${v}") nixConf)) + "\n"; + + nixConfContents = (lib.concatStringsSep "\n" (lib.mapAttrsFlatten (n: v: + let + vStr = if builtins.isList v then lib.concatStringsSep " " v else v; + in + "${n} = ${vStr}") (defaultNixConf // nixConf))) + "\n"; baseSystem = let nixpkgs = pkgs.path; - channel = pkgs.runCommand "channel-nixos" { } '' + channel = pkgs.runCommand "channel-nixos" { inherit bundleNixpkgs; } '' mkdir $out - ln -s ${nixpkgs} $out/nixpkgs - echo "[]" > $out/manifest.nix + if [ "$bundleNixpkgs" ]; then + ln -s ${nixpkgs} $out/nixpkgs + echo "[]" > $out/manifest.nix + fi ''; rootEnv = pkgs.buildPackages.buildEnv { name = "root-profile-env"; @@ -228,7 +240,7 @@ let in pkgs.dockerTools.buildLayeredImageWithNixDb { - inherit name tag; + inherit name tag maxLayers; contents = [ baseSystem ]; diff --git a/flake.lock b/flake.lock index 61eccb73c..a66c9cb1b 100644 --- a/flake.lock +++ b/flake.lock @@ -18,17 +18,18 @@ }, "nixpkgs": { "locked": { - "lastModified": 1632864508, - "narHash": "sha256-d127FIvGR41XbVRDPVvozUPQ/uRHbHwvfyKHwEt5xFM=", + "lastModified": 1657693803, + "narHash": "sha256-G++2CJ9u0E7NNTAi9n5G8TdDmGJXcIjkJ3NF8cetQB8=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "82891b5e2c2359d7e58d08849e4c89511ab94234", + 
"rev": "365e1b3a859281cf11b94f87231adeabbdd878a2", "type": "github" }, "original": { - "id": "nixpkgs", - "ref": "nixos-21.05-small", - "type": "indirect" + "owner": "NixOS", + "ref": "nixos-22.05-small", + "repo": "nixpkgs", + "type": "github" } }, "nixpkgs-regression": { @@ -41,9 +42,10 @@ "type": "github" }, "original": { - "id": "nixpkgs", + "owner": "NixOS", + "repo": "nixpkgs", "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", - "type": "indirect" + "type": "github" } }, "root": { @@ -1,8 +1,8 @@ { description = "The purely functional package manager"; - inputs.nixpkgs.url = "nixpkgs/nixos-21.05-small"; - inputs.nixpkgs-regression.url = "nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; + inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.05-small"; + inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; }; outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src }: @@ -23,7 +23,7 @@ crossSystems = [ "armv6l-linux" "armv7l-linux" ]; - stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" ]; + stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" "ccacheStdenv" ]; forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f system); forAllSystemsAndStdenvs = f: forAllSystems (system: @@ -36,7 +36,7 @@ ) ); - forAllStdenvs = stdenvs: f: nixpkgs.lib.genAttrs stdenvs (stdenv: f stdenv); + forAllStdenvs = f: nixpkgs.lib.genAttrs stdenvs (stdenv: f stdenv); # Memoize nixpkgs for different platforms for efficiency. nixpkgsFor = @@ -54,7 +54,7 @@ # we want most of the time and for backwards compatibility forAllSystems (system: stdenvsPackages.${system} // stdenvsPackages.${system}.stdenvPackages); - commonDeps = pkgs: with pkgs; rec { + commonDeps = { pkgs, isStatic ? false }: with pkgs; rec { # Use "busybox-sandbox-shell" if present, # if not (legacy) fallback and hope it's sufficient. sh = pkgs.busybox-sandbox-shell or (busybox.override { @@ -85,10 +85,11 @@ lib.optionals stdenv.isLinux [ "--with-boost=${boost}/lib" "--with-sandbox-shell=${sh}/bin/busybox" + ] + ++ lib.optionals (stdenv.isLinux && !(isStatic && stdenv.system == "aarch64-linux")) [ "LDFLAGS=-fuse-ld=gold" ]; - nativeBuildDeps = [ buildPackages.bison @@ -102,12 +103,12 @@ # Tests buildPackages.git buildPackages.mercurial # FIXME: remove? only needed for tests - buildPackages.jq + buildPackages.jq # Also for custom mdBook preprocessor. 
] ++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)]; buildDeps = - [ curl + [ (curl.override { patchNetrcRegression = true; }) bzip2 xz brotli editline openssl sqlite libarchive @@ -135,11 +136,6 @@ })) nlohmann_json ]; - - perlDeps = - [ perl - perlPackages.DBDSQLite - ]; }; installScriptFor = systems: @@ -176,7 +172,7 @@ echo "file installer $out/install" >> $out/nix-support/hydra-build-products ''; - testNixVersions = pkgs: client: daemon: with commonDeps pkgs; with pkgs.lib; pkgs.stdenv.mkDerivation { + testNixVersions = pkgs: client: daemon: with commonDeps { inherit pkgs; }; with pkgs.lib; pkgs.stdenv.mkDerivation { NIX_DAEMON_PACKAGE = daemon; NIX_CLIENT_PACKAGE = client; name = @@ -264,6 +260,7 @@ echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products tar cvfJ $fn \ --owner=0 --group=0 --mode=u+rw,uga+r \ + --mtime='1970-01-01' \ --absolute-names \ --hard-dereference \ --transform "s,$TMPDIR/install,$dir/install," \ @@ -287,7 +284,7 @@ # Forward from the previous stage as we don’t want it to pick the lowdown override nixUnstable = prev.nixUnstable; - nix = with final; with commonDeps pkgs; currentStdenv.mkDerivation { + nix = with final; with commonDeps { inherit pkgs; }; currentStdenv.mkDerivation { name = "nix-${version}"; inherit version; @@ -319,6 +316,7 @@ for LIB in $out/lib/*.dylib; do chmod u+w $LIB install_name_tool -id $LIB $LIB + install_name_tool -delete_rpath ${boost}/lib/ $LIB || true done install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib ''} @@ -353,7 +351,7 @@ strictDeps = true; - passthru.perl-bindings = with final; currentStdenv.mkDerivation { + passthru.perl-bindings = with final; perl.pkgs.toPerlModule (currentStdenv.mkDerivation { name = "nix-perl-${version}"; src = self; @@ -366,7 +364,7 @@ buildInputs = [ nix - curl + (curl.override { patchNetrcRegression = true; }) bzip2 xz pkgs.perl @@ -375,16 +373,17 @@ ++ lib.optional (currentStdenv.isLinux || currentStdenv.isDarwin) libsodium ++ lib.optional currentStdenv.isDarwin darwin.apple_sdk.frameworks.Security; - configureFlags = '' - --with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix} - --with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix} - ''; + configureFlags = [ + "--with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}" + "--with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}" + ]; enableParallelBuilding = true; postUnpack = "sourceRoot=$sourceRoot/perl"; - }; + }); + meta.platforms = systems; }; lowdown-nix = with final; currentStdenv.mkDerivation rec { @@ -409,7 +408,7 @@ # A Nixpkgs overlay that overrides the 'nix' and # 'nix.perl-bindings' packages. - overlay = overlayFor (p: p.stdenv); + overlays.default = overlayFor (p: p.stdenv); hydraJobs = { @@ -434,7 +433,7 @@ value = let nixpkgsCross = import nixpkgs { inherit system crossSystem; - overlays = [ self.overlay ]; + overlays = [ self.overlays.default ]; }; in binaryTarball nixpkgsFor.${system} self.packages.${system}."nix-${crossSystem}" nixpkgsCross; }) crossSystems)); @@ -452,7 +451,7 @@ # Line coverage analysis. 
coverage = with nixpkgsFor.x86_64-linux; - with commonDeps pkgs; + with commonDeps { inherit pkgs; }; releaseTools.coverageAnalysis { name = "nix-coverage-${version}"; @@ -480,31 +479,31 @@ tests.remoteBuilds = import ./tests/remote-builds.nix { system = "x86_64-linux"; inherit nixpkgs; - inherit (self) overlay; + overlay = self.overlays.default; }; tests.nix-copy-closure = import ./tests/nix-copy-closure.nix { system = "x86_64-linux"; inherit nixpkgs; - inherit (self) overlay; + overlay = self.overlays.default; }; tests.nssPreload = (import ./tests/nss-preload.nix rec { system = "x86_64-linux"; inherit nixpkgs; - inherit (self) overlay; + overlay = self.overlays.default; }); tests.githubFlakes = (import ./tests/github-flakes.nix rec { system = "x86_64-linux"; inherit nixpkgs; - inherit (self) overlay; + overlay = self.overlays.default; }); tests.sourcehutFlakes = (import ./tests/sourcehut-flakes.nix rec { system = "x86_64-linux"; inherit nixpkgs; - inherit (self) overlay; + overlay = self.overlays.default; }); tests.setuid = nixpkgs.lib.genAttrs @@ -512,7 +511,7 @@ (system: import ./tests/setuid.nix rec { inherit nixpkgs system; - inherit (self) overlay; + overlay = self.overlays.default; }); # Make sure that nix-env still produces the exact same result @@ -547,6 +546,11 @@ # againstLatestStable = testNixVersions pkgs pkgs.nix pkgs.nixStable; } "touch $out"); + installerTests = import ./tests/installer { + binaryTarballs = self.hydraJobs.binaryTarball; + inherit nixpkgsFor; + }; + }; checks = forAllSystems (system: { @@ -557,12 +561,13 @@ dockerImage = self.hydraJobs.dockerImage.${system}; }); - packages = forAllSystems (system: { + packages = forAllSystems (system: rec { inherit (nixpkgsFor.${system}) nix; + default = nix; } // (nixpkgs.lib.optionalAttrs (builtins.elem system linux64BitSystems) { nix-static = let nixpkgs = nixpkgsFor.${system}.pkgsStatic; - in with commonDeps nixpkgs; nixpkgs.stdenv.mkDerivation { + in with commonDeps { pkgs = nixpkgs; isStatic = true; }; nixpkgs.stdenv.mkDerivation { name = "nix-${version}"; src = self; @@ -574,14 +579,24 @@ nativeBuildInputs = nativeBuildDeps; buildInputs = buildDeps ++ propagatedDeps; - configureFlags = [ "--sysconfdir=/etc" ]; + # Work around pkgsStatic disabling all tests. + # Remove in NixOS 22.11, see https://github.com/NixOS/nixpkgs/pull/140271. 
+ preHook = + '' + doCheck=1 + doInstallCheck=1 + ''; + + configureFlags = + configureFlags ++ + [ "--sysconfdir=/etc" + "--enable-embedded-sandbox-shell" + ]; enableParallelBuilding = true; makeFlags = "profiledir=$(out)/etc/profile.d"; - doCheck = true; - installFlags = "sysconfdir=$(out)/etc"; postInstall = '' @@ -591,7 +606,6 @@ echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products ''; - doInstallCheck = true; installCheckFlags = "sysconfdir=$(out)/etc"; stripAllList = ["bin"]; @@ -600,6 +614,7 @@ hardeningDisable = [ "pie" ]; }; + dockerImage = let pkgs = nixpkgsFor.${system}; @@ -614,14 +629,16 @@ ln -s ${image} $image echo "file binary-dist $image" >> $out/nix-support/hydra-build-products ''; - } // builtins.listToAttrs (map (crossSystem: { + } + + // builtins.listToAttrs (map (crossSystem: { name = "nix-${crossSystem}"; value = let nixpkgsCross = import nixpkgs { inherit system crossSystem; - overlays = [ self.overlay ]; + overlays = [ self.overlays.default ]; }; - in with commonDeps nixpkgsCross; nixpkgsCross.stdenv.mkDerivation { + in with commonDeps { pkgs = nixpkgsCross; }; nixpkgsCross.stdenv.mkDerivation { name = "nix-${version}"; src = self; @@ -653,44 +670,45 @@ doInstallCheck = true; installCheckFlags = "sysconfdir=$(out)/etc"; }; - }) crossSystems)) // (builtins.listToAttrs (map (stdenvName: + }) (if system == "x86_64-linux" then crossSystems else []))) + + // (builtins.listToAttrs (map (stdenvName: nixpkgsFor.${system}.lib.nameValuePair "nix-${stdenvName}" nixpkgsFor.${system}."${stdenvName}Packages".nix ) stdenvs))); - defaultPackage = forAllSystems (system: self.packages.${system}.nix); + devShells = forAllSystems (system: + forAllStdenvs (stdenv: + with nixpkgsFor.${system}; + with commonDeps { inherit pkgs; }; + nixpkgsFor.${system}.${stdenv}.mkDerivation { + name = "nix"; - devShell = forAllSystems (system: self.devShells.${system}.stdenvPackages); - - devShells = forAllSystemsAndStdenvs (system: stdenv: - with nixpkgsFor.${system}; - with commonDeps pkgs; - - nixpkgsFor.${system}.${stdenv}.mkDerivation { - name = "nix"; - - outputs = [ "out" "dev" "doc" ]; + outputs = [ "out" "dev" "doc" ]; - nativeBuildInputs = nativeBuildDeps; - buildInputs = buildDeps ++ propagatedDeps ++ awsDeps ++ perlDeps; + nativeBuildInputs = nativeBuildDeps; + buildInputs = buildDeps ++ propagatedDeps ++ awsDeps; - inherit configureFlags; + inherit configureFlags; - enableParallelBuilding = true; + enableParallelBuilding = true; - installFlags = "sysconfdir=$(out)/etc"; + installFlags = "sysconfdir=$(out)/etc"; - shellHook = - '' - PATH=$prefix/bin:$PATH - unset PYTHONPATH - export MANPATH=$out/share/man:$MANPATH + shellHook = + '' + PATH=$prefix/bin:$PATH + unset PYTHONPATH + export MANPATH=$out/share/man:$MANPATH - # Make bash completion work. - XDG_DATA_DIRS+=:$out/share - ''; - }); + # Make bash completion work. 
+ XDG_DATA_DIRS+=:$out/share + ''; + } + ) + // { default = self.devShells.${system}.stdenv; } + ); }; } diff --git a/misc/bash/completion.sh b/misc/bash/completion.sh index 045053dee..9af695f5a 100644 --- a/misc/bash/completion.sh +++ b/misc/bash/completion.sh @@ -15,7 +15,7 @@ function _complete_nix { else COMPREPLY+=("$completion") fi - done < <(NIX_GET_COMPLETIONS=$cword "${words[@]/#\~/$HOME}" 2>/dev/null) + done < <(NIX_GET_COMPLETIONS=$cword "${words[@]}" 2>/dev/null) __ltrim_colon_completions "$cur" } diff --git a/misc/systemd/local.mk b/misc/systemd/local.mk index 1fa037485..76121a0f9 100644 --- a/misc/systemd/local.mk +++ b/misc/systemd/local.mk @@ -1,7 +1,8 @@ ifdef HOST_LINUX $(foreach n, nix-daemon.socket nix-daemon.service, $(eval $(call install-file-in, $(d)/$(n), $(prefix)/lib/systemd/system, 0644))) + $(foreach n, nix-daemon.conf, $(eval $(call install-file-in, $(d)/$(n), $(prefix)/lib/tmpfiles.d, 0644))) - clean-files += $(d)/nix-daemon.socket $(d)/nix-daemon.service + clean-files += $(d)/nix-daemon.socket $(d)/nix-daemon.service $(d)/nix-daemon.conf endif diff --git a/misc/systemd/nix-daemon.conf.in b/misc/systemd/nix-daemon.conf.in new file mode 100644 index 000000000..e7b264234 --- /dev/null +++ b/misc/systemd/nix-daemon.conf.in @@ -0,0 +1 @@ +d @localstatedir@/nix/daemon-socket 0755 root root - - diff --git a/misc/systemd/nix-daemon.service.in b/misc/systemd/nix-daemon.service.in index 25655204d..e3ac42beb 100644 --- a/misc/systemd/nix-daemon.service.in +++ b/misc/systemd/nix-daemon.service.in @@ -1,12 +1,15 @@ [Unit] Description=Nix Daemon +Documentation=man:nix-daemon https://nixos.org/manual RequiresMountsFor=@storedir@ RequiresMountsFor=@localstatedir@ +RequiresMountsFor=@localstatedir@/nix/db ConditionPathIsReadWrite=@localstatedir@/nix/daemon-socket [Service] ExecStart=@@bindir@/nix-daemon nix-daemon --daemon KillMode=process +LimitNOFILE=4096 [Install] WantedBy=multi-user.target diff --git a/misc/zsh/completion.zsh b/misc/zsh/completion.zsh index e702c721e..f9b3dca74 100644 --- a/misc/zsh/completion.zsh +++ b/misc/zsh/completion.zsh @@ -10,14 +10,15 @@ function _nix() { local -a suggestions declare -a suggestions for suggestion in ${res:1}; do - # FIXME: This doesn't work properly if the suggestion word contains a `:` - # itself - suggestions+="${suggestion/ /:}" + suggestions+=("${suggestion%% *}") done + local -a args if [[ "$tpe" == filenames ]]; then - compadd -f + args+=('-f') + elif [[ "$tpe" == attrs ]]; then + args+=('-S' '') fi - _describe 'nix' suggestions + compadd -J nix "${args[@]}" -a suggestions } _nix "$@" diff --git a/misc/zsh/local.mk b/misc/zsh/local.mk index 418fb1377..0b4e294fb 100644 --- a/misc/zsh/local.mk +++ b/misc/zsh/local.mk @@ -1 +1,2 @@ $(eval $(call install-file-as, $(d)/completion.zsh, $(datarootdir)/zsh/site-functions/_nix, 0644)) +$(eval $(call install-file-as, $(d)/run-help-nix, $(datarootdir)/zsh/site-functions/run-help-nix, 0644)) diff --git a/misc/zsh/run-help-nix b/misc/zsh/run-help-nix new file mode 100644 index 000000000..f91a304eb --- /dev/null +++ b/misc/zsh/run-help-nix @@ -0,0 +1,42 @@ +emulate -L zsh + +# run-help is a zsh widget that can be bound to a key. It mainly looks up the +# man page for the currently typed in command. +# +# Although run-help works for any command without requiring special support, +# it can only deduce the right man page based solely on the name of the +# command. 
Programs like Nix provide better integration with run-help by +# helping zsh identify Nix subcommands and their corresponding man pages. This +# is what this function does. +# +# To actually use run-help on zsh, place the following lines in your .zshrc: +# +# (( $+aliases[run-help] )) && unalias run-help +# autoload -Uz run-help run-help-nix +# +# Then also assign run-help to any key of choice: +# +# bindkey '^[h' run-help + +while [[ "$#" != 0 && "$1" == -* ]]; do + shift +done + +local -a subcommands; subcommands=( nix3 ) + +local arg +for arg in "$@"; do + if man -w "${(j:-:)subcommands}-$arg" >/dev/null 2>&1; then + subcommands+="$arg" + else + break + fi +done + +if (( $#subcommands > 1 )); then + man "${(j:-:)subcommands}" +else + man nix +fi + +return $? diff --git a/mk/libraries.mk b/mk/libraries.mk index ffd7b5610..6541775f3 100644 --- a/mk/libraries.mk +++ b/mk/libraries.mk @@ -91,7 +91,7 @@ define build-library $(1)_PATH := $$(_d)/$$($(1)_NAME).$(SO_EXT) $$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/ - $$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED) + +$$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED) ifndef HOST_DARWIN $(1)_LDFLAGS_USE += -Wl,-rpath,$$(abspath $$(_d)) @@ -105,7 +105,7 @@ define build-library $$(eval $$(call create-dir, $$($(1)_INSTALL_DIR))) $$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/ - $$(trace-ld) $(CXX) -o $$@ -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) + +$$(trace-ld) $(CXX) -o $$@ -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) $(1)_LDFLAGS_USE_INSTALLED += -L$$(DESTDIR)$$($(1)_INSTALL_DIR) -l$$(patsubst lib%,%,$$(strip $$($(1)_NAME))) ifndef HOST_DARWIN @@ -125,7 +125,7 @@ define build-library $(1)_PATH := $$(_d)/$$($(1)_NAME).a $$($(1)_PATH): $$($(1)_OBJS) | $$(_d)/ - $$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$? 
+ +$$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$^ $$(trace-ar) $(AR) crs $$@ $$(_d)/$$($(1)_NAME).o $(1)_LDFLAGS_USE += $$($(1)_PATH) $$($(1)_LDFLAGS) diff --git a/mk/programs.mk b/mk/programs.mk index d0cf5baf0..0fc1990f7 100644 --- a/mk/programs.mk +++ b/mk/programs.mk @@ -32,7 +32,7 @@ define build-program $$(eval $$(call create-dir, $$(_d))) $$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/ - $$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) + +$$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $(1)_INSTALL_DIR ?= $$(bindir) @@ -49,7 +49,7 @@ define build-program _libs_final := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_INSTALL_PATH)) $(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/ - $$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) + +$$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) else diff --git a/mk/run_test.sh b/mk/run_test.sh index 3783d3bf7..7e95df2ac 100755 --- a/mk/run_test.sh +++ b/mk/run_test.sh @@ -14,9 +14,27 @@ if [ -t 1 ]; then yellow="[33;1m" normal="[m" fi -(cd tests && env ${TESTS_ENVIRONMENT} init.sh 2>/dev/null > /dev/null) -log="$(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} $(basename $1) 2>&1)" -status=$? + +run_test () { + (cd tests && env ${TESTS_ENVIRONMENT} init.sh 2>/dev/null > /dev/null) + log="$(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} $(basename $1) 2>&1)" + status=$? +} + +run_test "$1" + +# Hack: Retry the test if it fails with “unexpected EOF reading a line” as these +# appear randomly without anyone knowing why. 
+# See https://github.com/NixOS/nix/issues/3605 for more info +if [[ $status -ne 0 && $status -ne 99 && \ + "$(uname)" == "Darwin" && \ + "$log" =~ "unexpected EOF reading a line" \ +]]; then + echo "$post_run_msg [${yellow}FAIL$normal] (possibly flaky, so will be retried)" + echo "$log" | sed 's/^/ /' + run_test "$1" +fi + if [ $status -eq 0 ]; then echo "$post_run_msg [${green}PASS$normal]" elif [ $status -eq 99 ]; then diff --git a/scripts/check-hydra-status.sh b/scripts/check-hydra-status.sh index 5e2f03429..e62705e94 100644 --- a/scripts/check-hydra-status.sh +++ b/scripts/check-hydra-status.sh @@ -14,7 +14,7 @@ curl -sS -H 'Accept: application/json' https://hydra.nixos.org/jobset/nix/master someBuildFailed=0 for buildId in $BUILDS_FOR_LATEST_EVAL; do - buildInfo=$(curl -sS -H 'Accept: application/json' "https://hydra.nixos.org/build/$buildId") + buildInfo=$(curl --fail -sS -H 'Accept: application/json' "https://hydra.nixos.org/build/$buildId") finished=$(echo "$buildInfo" | jq -r '.finished') diff --git a/scripts/create-darwin-volume.sh b/scripts/create-darwin-volume.sh index 4bac4b7ba..103e1e391 100755 --- a/scripts/create-darwin-volume.sh +++ b/scripts/create-darwin-volume.sh @@ -442,9 +442,14 @@ add_nix_vol_fstab_line() { local escaped_mountpoint="${NIX_ROOT/ /'\\\'040}" shift - # wrap `ex` to work around a problem with vim plugins breaking exit codes; - # (see https://github.com/NixOS/nix/issues/5468) - # we'd prefer EDITOR="/usr/bin/ex --noplugin" but vifs doesn't word-split + # wrap `ex` to work around problems w/ vim features breaking exit codes + # - plugins (see github.com/NixOS/nix/issues/5468): -u NONE + # - swap file: -n + # + # the first draft used `--noplugin`, but github.com/NixOS/nix/issues/6462 + # suggests we need the less-semantic `-u NONE` + # + # we'd prefer EDITOR="/usr/bin/ex -u NONE" but vifs doesn't word-split # the EDITOR env. # # TODO: at some point we should switch to `--clean`, but it wasn't added @@ -452,7 +457,7 @@ add_nix_vol_fstab_line() { # minver 10.12.6 seems to have released with vim 7.4 cat > "$SCRATCH/ex_cleanroom_wrapper" <<EOF #!/bin/sh -/usr/bin/ex --noplugin "\$@" +/usr/bin/ex -u NONE -n "\$@" EOF chmod 755 "$SCRATCH/ex_cleanroom_wrapper" @@ -646,8 +651,9 @@ EOF task "Configuring /etc/synthetic.conf to make a mount-point at $NIX_ROOT" >&2 # technically /etc/synthetic.d/nix is supported in Big Sur+ # but handling both takes even more code... + # See earlier note; `-u NONE` disables vim plugins/rc, `-n` skips swapfile _sudo "to add Nix to /etc/synthetic.conf" \ - /usr/bin/ex --noplugin /etc/synthetic.conf <<EOF + /usr/bin/ex -u NONE -n /etc/synthetic.conf <<EOF :a ${NIX_ROOT:1} . @@ -815,7 +821,8 @@ setup_volume_daemon() { local volume_uuid="$2" if ! test_voldaemon; then task "Configuring LaunchDaemon to mount '$NIX_VOLUME_LABEL'" >&2 - _sudo "to install the Nix volume mounter" /usr/bin/ex --noplugin "$NIX_VOLUME_MOUNTD_DEST" <<EOF + # See earlier note; `-u NONE` disables vim plugins/rc, `-n` skips swapfile + _sudo "to install the Nix volume mounter" /usr/bin/ex -u NONE -n "$NIX_VOLUME_MOUNTD_DEST" <<EOF :a $(generate_mount_daemon "$cmd_type" "$volume_uuid") . 
diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh index afaa6783b..5111a5dde 100644 --- a/scripts/install-darwin-multi-user.sh +++ b/scripts/install-darwin-multi-user.sh @@ -167,7 +167,7 @@ poly_user_shell_get() { } poly_user_shell_set() { - _sudo "in order to give $1 a safe home directory" \ + _sudo "in order to give $1 a safe shell" \ /usr/bin/dscl . -create "/Users/$1" "UserShell" "$2" } diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh index d3ed53d09..a39339050 100644 --- a/scripts/install-multi-user.sh +++ b/scripts/install-multi-user.sh @@ -23,10 +23,10 @@ readonly RED='\033[31m' # installer allows overriding build user count to speed up installation # as creating each user takes non-trivial amount of time on macos readonly NIX_USER_COUNT=${NIX_USER_COUNT:-32} -readonly NIX_BUILD_GROUP_ID="30000" +readonly NIX_BUILD_GROUP_ID="${NIX_BUILD_GROUP_ID:-30000}" readonly NIX_BUILD_GROUP_NAME="nixbld" # darwin installer needs to override these -NIX_FIRST_BUILD_UID="30001" +NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-30001}" NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d" # Please don't change this. We don't support it, because the # default shell profile that comes with Nix doesn't support it. @@ -37,6 +37,19 @@ readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshrc" "/e readonly PROFILE_BACKUP_SUFFIX=".backup-before-nix" readonly PROFILE_NIX_FILE="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.sh" +# Fish has different syntax than zsh/bash, treat it separate +readonly PROFILE_FISH_SUFFIX="conf.d/nix.fish" +readonly PROFILE_FISH_PREFIXES=( + # each of these are common values of $__fish_sysconf_dir, + # under which Fish will look for a file named + # $PROFILE_FISH_SUFFIX. + "/etc/fish" # standard + "/usr/local/etc/fish" # their installer .pkg for macOS + "/opt/homebrew/etc/fish" # homebrew + "/opt/local/etc/fish" # macports +) +readonly PROFILE_NIX_FILE_FISH="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.fish" + readonly NIX_INSTALLED_NIX="@nix@" readonly NIX_INSTALLED_CACERT="@cacert@" #readonly NIX_INSTALLED_NIX="/nix/store/j8dbv5w6jl34caywh2ygdy88knx1mdf7-nix-2.3.6" @@ -59,6 +72,30 @@ headless() { fi } +is_root() { + if [ "$EUID" -eq 0 ]; then + return 0 + else + return 1 + fi +} + +is_os_linux() { + if [ "$(uname -s)" = "Linux" ]; then + return 0 + else + return 1 + fi +} + +is_os_darwin() { + if [ "$(uname -s)" = "Darwin" ]; then + return 0 + else + return 1 + fi +} + contact_us() { echo "You can open an issue at https://github.com/nixos/nix/issues" echo "" @@ -313,14 +350,23 @@ __sudo() { _sudo() { local expl="$1" shift - if ! headless; then + if ! headless || is_root; then __sudo "$expl" "$*" >&2 fi - sudo "$@" + + if is_root; then + env "$@" + else + sudo "$@" + fi } +# Ensure that $TMPDIR exists if defined. +if [[ -n "${TMPDIR:-}" ]] && [[ ! -d "${TMPDIR:-}" ]]; then + mkdir -m 0700 -p "${TMPDIR:-}" +fi -readonly SCRATCH=$(mktemp -d "${TMPDIR:-/tmp/}tmp.XXXXXXXXXX") +readonly SCRATCH=$(mktemp -d) finish_cleanup() { rm -rf "$SCRATCH" } @@ -329,7 +375,7 @@ finish_fail() { finish_cleanup failure <<EOF -Jeeze, something went wrong. If you can take all the output and open +Oh no, something went wrong. If you can take all the output and open an issue, we'd love to fix the problem so nobody else has this issue. :( @@ -423,6 +469,18 @@ EOF fi done + if is_os_linux && [ ! -e /run/systemd/system ]; then + warning <<EOF +We did not detect systemd on your system. 
With a multi-user install +without systemd you will have to manually configure your init system to +launch the Nix daemon after installation. +EOF + if ! ui_confirm "Do you want to proceed with a multi-user installation?"; then + failure <<EOF +You have aborted the installation. +EOF + fi + fi } setup_report() { @@ -626,6 +684,17 @@ place_channel_configuration() { fi } +check_selinux() { + if command -v getenforce > /dev/null 2>&1; then + if [ "$(getenforce)" = "Enforcing" ]; then + failure <<EOF +Nix does not work with selinux enabled yet! +see https://github.com/NixOS/nix/issues/2374 +EOF + fi + fi +} + welcome_to_nix() { ok "Welcome to the Multi-User Nix Installation" @@ -739,7 +808,7 @@ install_from_extracted_nix() { cd "$EXTRACTED_NIX_PATH" _sudo "to copy the basic Nix files to the new store at $NIX_ROOT/store" \ - cp -RLp ./store/* "$NIX_ROOT/store/" + cp -RPp ./store/* "$NIX_ROOT/store/" _sudo "to make the new store non-writable at $NIX_ROOT/store" \ chmod -R ugo-w "$NIX_ROOT/store/" @@ -754,7 +823,7 @@ EOF fi _sudo "to load data for the first time in to the Nix Database" \ - "$NIX_INSTALLED_NIX/bin/nix-store" --load-db < ./.reginfo + HOME="$ROOT_HOME" "$NIX_INSTALLED_NIX/bin/nix-store" --load-db < ./.reginfo echo " Just finished getting the nix database ready." ) @@ -772,6 +841,19 @@ fi EOF } +# Fish has differing syntax +fish_source_lines() { + cat <<EOF + +# Nix +if test -e '$PROFILE_NIX_FILE_FISH' + . '$PROFILE_NIX_FILE_FISH' +end +# End Nix + +EOF +} + configure_shell_profile() { task "Setting up shell profiles: ${PROFILE_TARGETS[*]}" for profile_target in "${PROFILE_TARGETS[@]}"; do @@ -793,6 +875,27 @@ configure_shell_profile() { tee -a "$profile_target" fi done + + task "Setting up shell profiles for Fish with with ${PROFILE_FISH_SUFFIX} inside ${PROFILE_FISH_PREFIXES[*]}" + for fish_prefix in "${PROFILE_FISH_PREFIXES[@]}"; do + if [ ! -d "$fish_prefix" ]; then + # this specific prefix (ie: /etc/fish) is very likely to exist + # if Fish is installed with this sysconfdir. + continue + fi + + profile_target="${fish_prefix}/${PROFILE_FISH_SUFFIX}" + conf_dir=$(dirname "$profile_target") + if [ ! -d "$conf_dir" ]; then + _sudo "create $conf_dir for our Fish hook" \ + mkdir "$conf_dir" + fi + + fish_source_lines \ + | _sudo "write nix-daemon settings to $profile_target" \ + tee "$profile_target" + done + # TODO: should we suggest '. $PROFILE_NIX_FILE'? It would get them on # their way less disruptively, but a counter-argument is that they won't # immediately notice if something didn't get set up right? @@ -842,22 +945,14 @@ EOF install -m 0664 "$SCRATCH/nix.conf" /etc/nix/nix.conf } + main() { - # TODO: I've moved this out of validate_starting_assumptions so we - # can fail faster in this case. Sourcing install-darwin... now runs - # `touch /` to detect Read-only root, but it could update times on - # pre-Catalina macOS if run as root user. - if [ "$EUID" -eq 0 ]; then - failure <<EOF -Please do not run this script with root privileges. I will call sudo -when I need to. -EOF - fi + check_selinux - if [ "$(uname -s)" = "Darwin" ]; then + if is_os_darwin; then # shellcheck source=./install-darwin-multi-user.sh . "$EXTRACTED_NIX_PATH/install-darwin-multi-user.sh" - elif [ "$(uname -s)" = "Linux" ]; then + elif is_os_linux; then # shellcheck source=./install-systemd-multi-user.sh . "$EXTRACTED_NIX_PATH/install-systemd-multi-user.sh" # most of this works on non-systemd distros also else @@ -865,7 +960,10 @@ EOF fi welcome_to_nix - chat_about_sudo + + if ! 
is_root; then + chat_about_sudo + fi cure_artifacts # TODO: there's a tension between cure and validate. I moved the diff --git a/scripts/install-nix-from-closure.sh b/scripts/install-nix-from-closure.sh index d543b4463..d4eed2efe 100644 --- a/scripts/install-nix-from-closure.sh +++ b/scripts/install-nix-from-closure.sh @@ -148,7 +148,9 @@ if ! [ -w "$dest" ]; then exit 1 fi -mkdir -p "$dest/store" +# The auto-chroot code in openFromNonUri() checks for the +# non-existence of /nix/var/nix, so we need to create it here. +mkdir -p "$dest/store" "$dest/var/nix" printf "copying Nix to %s..." "${dest}/store" >&2 # Insert a newline if no progress is shown. @@ -207,31 +209,50 @@ if [ -z "$NIX_INSTALLER_NO_CHANNEL_ADD" ]; then fi added= -p=$HOME/.nix-profile/etc/profile.d/nix.sh +p= +p_sh=$HOME/.nix-profile/etc/profile.d/nix.sh +p_fish=$HOME/.nix-profile/etc/profile.d/nix.fish if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then # Make the shell source nix.sh during login. for i in .bash_profile .bash_login .profile; do fn="$HOME/$i" if [ -w "$fn" ]; then - if ! grep -q "$p" "$fn"; then + if ! grep -q "$p_sh" "$fn"; then echo "modifying $fn..." >&2 - printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn" + printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p_sh" "$p_sh" >> "$fn" fi added=1 + p=${p_sh} break fi done for i in .zshenv .zshrc; do fn="$HOME/$i" if [ -w "$fn" ]; then - if ! grep -q "$p" "$fn"; then + if ! grep -q "$p_sh" "$fn"; then echo "modifying $fn..." >&2 - printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn" + printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p_sh" "$p_sh" >> "$fn" fi added=1 + p=${p_sh} break fi done + + if [ -d "$HOME/.config/fish" ]; then + fishdir=$HOME/.config/fish/conf.d + if [ ! -d "$fishdir" ]; then + mkdir -p "$fishdir" + fi + + fn="$fishdir/nix.fish" + echo "placing $fn..." >&2 + printf '\nif test -e %s; . 
%s; end # added by Nix installer\n' "$p_fish" "$p_fish" > "$fn" + added=1 + p=${p_fish} + fi +else + p=${p_sh} fi if [ -z "$added" ]; then diff --git a/scripts/install-systemd-multi-user.sh b/scripts/install-systemd-multi-user.sh index f4a2dfc5d..62397127a 100755 --- a/scripts/install-systemd-multi-user.sh +++ b/scripts/install-systemd-multi-user.sh @@ -9,6 +9,8 @@ readonly SERVICE_DEST=/etc/systemd/system/nix-daemon.service readonly SOCKET_SRC=/lib/systemd/system/nix-daemon.socket readonly SOCKET_DEST=/etc/systemd/system/nix-daemon.socket +readonly TMPFILES_SRC=/lib/tmpfiles.d/nix-daemon.conf +readonly TMPFILES_DEST=/etc/tmpfiles.d/nix-daemon.conf # Path for the systemd override unit file to contain the proxy settings readonly SERVICE_OVERRIDE=${SERVICE_DEST}.d/override.conf @@ -83,6 +85,13 @@ EOF poly_configure_nix_daemon_service() { if [ -e /run/systemd/system ]; then task "Setting up the nix-daemon systemd service" + + _sudo "to create the nix-daemon tmpfiles config" \ + ln -sfn /nix/var/nix/profiles/default/$TMPFILES_SRC $TMPFILES_DEST + + _sudo "to run systemd-tmpfiles once to pick that path up" \ + systemd-tmpfiles --create --prefix=/nix/var/nix + _sudo "to set up the nix-daemon service" \ systemctl link "/nix/var/nix/profiles/default$SERVICE_SRC" diff --git a/scripts/install.in b/scripts/install.in index 38d1fb36f..7d2e52b26 100755 --- a/scripts/install.in +++ b/scripts/install.in @@ -40,12 +40,12 @@ case "$(uname -s).$(uname -m)" in path=@tarballPath_aarch64-linux@ system=aarch64-linux ;; - Linux.armv6l_linux) + Linux.armv6l) hash=@tarballHash_armv6l-linux@ path=@tarballPath_armv6l-linux@ system=armv6l-linux ;; - Linux.armv7l_linux) + Linux.armv7l) hash=@tarballHash_armv7l-linux@ path=@tarballPath_armv7l-linux@ system=armv7l-linux @@ -82,7 +82,7 @@ if [ "$(uname -s)" != "Darwin" ]; then fi if command -v curl > /dev/null 2>&1; then - fetch() { curl -L "$1" -o "$2"; } + fetch() { curl --fail -L "$1" -o "$2"; } elif command -v wget > /dev/null 2>&1; then fetch() { wget "$1" -O "$2"; } else diff --git a/scripts/local.mk b/scripts/local.mk index b8477178e..46255e432 100644 --- a/scripts/local.mk +++ b/scripts/local.mk @@ -6,6 +6,8 @@ noinst-scripts += $(nix_noinst_scripts) profiledir = $(sysconfdir)/profile.d $(eval $(call install-file-as, $(d)/nix-profile.sh, $(profiledir)/nix.sh, 0644)) +$(eval $(call install-file-as, $(d)/nix-profile.fish, $(profiledir)/nix.fish, 0644)) $(eval $(call install-file-as, $(d)/nix-profile-daemon.sh, $(profiledir)/nix-daemon.sh, 0644)) +$(eval $(call install-file-as, $(d)/nix-profile-daemon.fish, $(profiledir)/nix-daemon.fish, 0644)) clean-files += $(nix_noinst_scripts) diff --git a/scripts/nix-profile-daemon.fish.in b/scripts/nix-profile-daemon.fish.in new file mode 100644 index 000000000..3d587dd7f --- /dev/null +++ b/scripts/nix-profile-daemon.fish.in @@ -0,0 +1,35 @@ +# Only execute this file once per shell. +if test -n "$__ETC_PROFILE_NIX_SOURCED" + exit +end + +set __ETC_PROFILE_NIX_SOURCED 1 + +set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile" + +# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work. 
+if test -n "$NIX_SSH_CERT_FILE" + : # Allow users to override the NIX_SSL_CERT_FILE +else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch + set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt +else if test -e /etc/ssl/ca-bundle.pem # openSUSE Tumbleweed + set --export NIX_SSL_CERT_FILE /etc/ssl/ca-bundle.pem +else if test -e /etc/ssl/certs/ca-bundle.crt # Old NixOS + set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-bundle.crt +else if test -e /etc/pki/tls/certs/ca-bundle.crt # Fedora, CentOS + set --export NIX_SSL_CERT_FILE /etc/pki/tls/certs/ca-bundle.crt +else if test -e "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" # fall back to cacert in Nix profile + set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" +else if test -e "$NIX_LINK/etc/ca-bundle.crt" # old cacert in Nix profile + set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ca-bundle.crt" +else + # Fall back to what is in the nix profiles, favouring whatever is defined last. + for i in $NIX_PROFILES + if test -e "$i/etc/ssl/certs/ca-bundle.crt" + set --export NIX_SSL_CERT_FILE "$i/etc/ssl/certs/ca-bundle.crt" + end + end +end + +fish_add_path --prepend --global "@localstatedir@/nix/profiles/default/bin" +fish_add_path --prepend --global "$HOME/.nix-profile/bin" diff --git a/scripts/nix-profile.fish.in b/scripts/nix-profile.fish.in new file mode 100644 index 000000000..8d783d7c0 --- /dev/null +++ b/scripts/nix-profile.fish.in @@ -0,0 +1,37 @@ +if test -n "$HOME" && test -n "$USER" + + # Set up the per-user profile. + + set NIX_LINK $HOME/.nix-profile + + # Set up environment. + # This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix + set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile" + + # Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work. + if test -n "$NIX_SSH_CERT_FILE" + : # Allow users to override the NIX_SSL_CERT_FILE + else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch + set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt + else if test -e /etc/ssl/ca-bundle.pem # openSUSE Tumbleweed + set --export NIX_SSL_CERT_FILE /etc/ssl/ca-bundle.pem + else if test -e /etc/ssl/certs/ca-bundle.crt # Old NixOS + set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-bundle.crt + else if test -e /etc/pki/tls/certs/ca-bundle.crt # Fedora, CentOS + set --export NIX_SSL_CERT_FILE /etc/pki/tls/certs/ca-bundle.crt + else if test -e "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" # fall back to cacert in Nix profile + set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" + else if test -e "$NIX_LINK/etc/ca-bundle.crt" # old cacert in Nix profile + set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ca-bundle.crt" + end + + # Only use MANPATH if it is already set. In general `man` will just simply + # pick up `.nix-profile/share/man` because is it close to `.nix-profile/bin` + # which is in the $PATH. For more info, run `manpath -d`. + if set --query MANPATH + set --export --prepend --path MANPATH "$NIX_LINK/share/man" + end + + fish_add_path --prepend --global "$NIX_LINK/bin" + set --erase NIX_LINK +end diff --git a/scripts/nix-profile.sh.in b/scripts/nix-profile.sh.in index 45cbcbe74..5636085d4 100644 --- a/scripts/nix-profile.sh.in +++ b/scripts/nix-profile.sh.in @@ -1,7 +1,6 @@ if [ -n "$HOME" ] && [ -n "$USER" ]; then # Set up the per-user profile. 
- # This part should be kept in sync with nixpkgs:nixos/modules/programs/shell.nix NIX_LINK=$HOME/.nix-profile diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 9d2eacb54..ff8ba2724 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -14,6 +14,7 @@ #include "pathlocks.hh" #include "globals.hh" #include "serialise.hh" +#include "build-result.hh" #include "store-api.hh" #include "derivations.hh" #include "local-store.hh" @@ -299,7 +300,7 @@ connected: std::set<Realisation> missingRealisations; StorePathSet missingPaths; - if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) && !derivationHasKnownOutputPaths(drv.type())) { + if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) && !drv.type().hasKnownOutputPaths()) { for (auto & outputName : wantedOutputs) { auto thisOutputHash = outputHashes.at(outputName); auto thisOutputId = DrvOutput{ thisOutputHash, outputName }; diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc index 6d183dfad..0740ea960 100644 --- a/src/libcmd/command.cc +++ b/src/libcmd/command.cc @@ -86,6 +86,12 @@ ref<Store> CopyCommand::getDstStore() EvalCommand::EvalCommand() { + addFlag({ + .longName = "debugger", + .description = "Start an interactive environment if evaluation fails.", + .category = MixEvalArgs::category, + .handler = {&startReplOnEvalErrors, true}, + }); } EvalCommand::~EvalCommand() @@ -103,7 +109,7 @@ ref<Store> EvalCommand::getEvalStore() ref<EvalState> EvalCommand::getEvalState() { - if (!evalState) + if (!evalState) { evalState = #if HAVE_BOEHMGC std::allocate_shared<EvalState>(traceable_allocator<EvalState>(), @@ -113,6 +119,11 @@ ref<EvalState> EvalCommand::getEvalState() searchPath, getEvalStore(), getStore()) #endif ; + + if (startReplOnEvalErrors) { + evalState->debugRepl = &runRepl; + }; + } return ref<EvalState>(evalState); } @@ -153,7 +164,7 @@ void BuiltPathsCommand::run(ref<Store> store) for (auto & p : store->queryAllValidPaths()) paths.push_back(BuiltPath::Opaque{p}); } else { - paths = toBuiltPaths(getEvalStore(), store, realiseMode, operateOn, installables); + paths = Installable::toBuiltPaths(getEvalStore(), store, realiseMode, operateOn, installables); if (recursive) { // XXX: This only computes the store path closure, ignoring // intermediate realisations @@ -197,16 +208,17 @@ void StorePathCommand::run(ref<Store> store, std::vector<StorePath> && storePath run(store, *storePaths.begin()); } -Strings editorFor(const Pos & pos) +Strings editorFor(const Path & file, uint32_t line) { auto editor = getEnv("EDITOR").value_or("cat"); auto args = tokenizeString<Strings>(editor); - if (pos.line > 0 && ( + if (line > 0 && ( editor.find("emacs") != std::string::npos || editor.find("nano") != std::string::npos || - editor.find("vim") != std::string::npos)) - args.push_back(fmt("+%d", pos.line)); - args.push_back(pos.file); + editor.find("vim") != std::string::npos || + editor.find("kak") != std::string::npos)) + args.push_back(fmt("+%d", line)); + args.push_back(file); return args; } @@ -214,7 +226,7 @@ MixProfile::MixProfile() { addFlag({ .longName = "profile", - .description = "The profile to update.", + .description = "The profile to operate on.", .labels = {"path"}, .handler = {&profile}, .completer = completePath diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh index bd2a0a7ee..3b4b40981 100644 --- a/src/libcmd/command.hh +++ b/src/libcmd/command.hh @@ -5,7 +5,6 @@ #include "common-eval-args.hh" #include "path.hh" #include 
"flake/lockfile.hh" -#include "store-api.hh" #include <optional> @@ -58,6 +57,9 @@ struct CopyCommand : virtual StoreCommand struct EvalCommand : virtual StoreCommand, MixEvalArgs { + bool startReplOnEvalErrors = false; + bool ignoreExceptionsDuringTry = false; + EvalCommand(); ~EvalCommand(); @@ -76,29 +78,28 @@ struct MixFlakeOptions : virtual Args, EvalCommand { flake::LockFlags lockFlags; + std::optional<std::string> needsFlakeInputCompletion = {}; + MixFlakeOptions(); - virtual std::optional<FlakeRef> getFlakeRefForCompletion() + virtual std::vector<std::string> getFlakesForCompletion() { return {}; } -}; -/* How to handle derivations in commands that operate on store paths. */ -enum class OperateOn { - /* Operate on the output path. */ - Output, - /* Operate on the .drv path. */ - Derivation + void completeFlakeInput(std::string_view prefix); + + void completionHook() override; }; struct SourceExprCommand : virtual Args, MixFlakeOptions { std::optional<Path> file; std::optional<std::string> expr; + bool readOnlyMode = false; // FIXME: move this; not all commands (e.g. 'nix run') use it. OperateOn operateOn = OperateOn::Output; - SourceExprCommand(); + SourceExprCommand(bool supportReadOnlyMode = false); std::vector<std::shared_ptr<Installable>> parseInstallables( ref<Store> store, std::vector<std::string> ss); @@ -113,19 +114,6 @@ struct SourceExprCommand : virtual Args, MixFlakeOptions void completeInstallable(std::string_view prefix); }; -enum class Realise { - /* Build the derivation. Postcondition: the - derivation outputs exist. */ - Outputs, - /* Don't build the derivation. Postcondition: the store derivation - exists. */ - Derivation, - /* Evaluate in dry-run mode. Postcondition: nothing. */ - // FIXME: currently unused, but could be revived if we can - // evaluate derivations in-memory. - Nothing -}; - /* A command that operates on a list of "installables", which can be store paths, attribute paths, Nix expressions, etc. 
*/ struct InstallablesCommand : virtual Args, SourceExprCommand @@ -135,12 +123,13 @@ struct InstallablesCommand : virtual Args, SourceExprCommand InstallablesCommand(); void prepare() override; + Installables load(); virtual bool useDefaultInstallables() { return true; } - std::optional<FlakeRef> getFlakeRefForCompletion() override; + std::vector<std::string> getFlakesForCompletion() override; -private: +protected: std::vector<std::string> _installables; }; @@ -150,13 +139,13 @@ struct InstallableCommand : virtual Args, SourceExprCommand { std::shared_ptr<Installable> installable; - InstallableCommand(); + InstallableCommand(bool supportReadOnlyMode = false); void prepare() override; - std::optional<FlakeRef> getFlakeRefForCompletion() override + std::vector<std::string> getFlakesForCompletion() override { - return parseFlakeRef(_installable, absPath(".")); + return {_installable}; } private: @@ -238,41 +227,9 @@ static RegisterCommand registerCommand2(std::vector<std::string> && name) return RegisterCommand(std::move(name), [](){ return make_ref<T>(); }); } -BuiltPaths build( - ref<Store> evalStore, - ref<Store> store, Realise mode, - const std::vector<std::shared_ptr<Installable>> & installables, - BuildMode bMode = bmNormal); - -std::set<StorePath> toStorePaths( - ref<Store> evalStore, - ref<Store> store, - Realise mode, - OperateOn operateOn, - const std::vector<std::shared_ptr<Installable>> & installables); - -StorePath toStorePath( - ref<Store> evalStore, - ref<Store> store, - Realise mode, - OperateOn operateOn, - std::shared_ptr<Installable> installable); - -std::set<StorePath> toDerivations( - ref<Store> store, - const std::vector<std::shared_ptr<Installable>> & installables, - bool useDeriver = false); - -BuiltPaths toBuiltPaths( - ref<Store> evalStore, - ref<Store> store, - Realise mode, - OperateOn operateOn, - const std::vector<std::shared_ptr<Installable>> & installables); - /* Helper function to generate args that invoke $EDITOR on filename:lineno. 
*/ -Strings editorFor(const Pos & pos); +Strings editorFor(const Path & file, uint32_t line); struct MixProfile : virtual StoreCommand { @@ -323,4 +280,8 @@ void printClosureDiff( const StorePath & afterPath, std::string_view indent); + +void runRepl( + ref<EvalState> evalState, + const ValMap & extraEnv); } diff --git a/src/libexpr/common-eval-args.cc b/src/libcmd/common-eval-args.cc index e50ff244c..140ed3b88 100644 --- a/src/libexpr/common-eval-args.cc +++ b/src/libcmd/common-eval-args.cc @@ -7,13 +7,12 @@ #include "registry.hh" #include "flake/flakeref.hh" #include "store-api.hh" +#include "command.hh" namespace nix { MixEvalArgs::MixEvalArgs() { - auto category = "Common evaluation options"; - addFlag({ .longName = "arg", .description = "Pass the value *expr* as the argument *name* to Nix functions.", @@ -59,6 +58,9 @@ MixEvalArgs::MixEvalArgs() fetchers::Attrs extraAttrs; if (to.subdir != "") extraAttrs["dir"] = to.subdir; fetchers::overrideRegistry(from.input, to.input, extraAttrs); + }}, + .completer = {[&](size_t, std::string_view prefix) { + completeFlakeRef(openStore(), prefix); }} }); diff --git a/src/libexpr/common-eval-args.hh b/src/libcmd/common-eval-args.hh index 03fa226aa..1ec800613 100644 --- a/src/libexpr/common-eval-args.hh +++ b/src/libcmd/common-eval-args.hh @@ -10,6 +10,8 @@ class Bindings; struct MixEvalArgs : virtual Args { + static constexpr auto category = "Common evaluation options"; + MixEvalArgs(); Bindings * getAutoArgs(EvalState & state); diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index c07e39628..e097f23b3 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -1,4 +1,6 @@ +#include "globals.hh" #include "installables.hh" +#include "util.hh" #include "command.hh" #include "attr-path.hh" #include "common-eval-args.hh" @@ -12,6 +14,7 @@ #include "eval-cache.hh" #include "url.hh" #include "registry.hh" +#include "build-result.hh" #include <regex> #include <queue> @@ -20,17 +23,6 @@ namespace nix { -void completeFlakeInputPath( - ref<EvalState> evalState, - const FlakeRef & flakeRef, - std::string_view prefix) -{ - auto flake = flake::getFlake(*evalState, flakeRef, true); - for (auto & input : flake.inputs) - if (hasPrefix(input.first, prefix)) - completions->add(input.first); -} - MixFlakeOptions::MixFlakeOptions() { auto category = "Common flake-related options"; @@ -83,8 +75,7 @@ MixFlakeOptions::MixFlakeOptions() lockFlags.inputUpdates.insert(flake::parseInputPath(s)); }}, .completer = {[&](size_t, std::string_view prefix) { - if (auto flakeRef = getFlakeRefForCompletion()) - completeFlakeInputPath(getEvalState(), *flakeRef, prefix); + needsFlakeInputCompletion = {std::string(prefix)}; }} }); @@ -98,6 +89,12 @@ MixFlakeOptions::MixFlakeOptions() lockFlags.inputOverrides.insert_or_assign( flake::parseInputPath(inputPath), parseFlakeRef(flakeRef, absPath("."), true)); + }}, + .completer = {[&](size_t n, std::string_view prefix) { + if (n == 0) + needsFlakeInputCompletion = {std::string(prefix)}; + else if (n == 1) + completeFlakeRef(getEvalState()->store, prefix); }} }); @@ -128,12 +125,33 @@ MixFlakeOptions::MixFlakeOptions() }); } -SourceExprCommand::SourceExprCommand() +void MixFlakeOptions::completeFlakeInput(std::string_view prefix) +{ + auto evalState = getEvalState(); + for (auto & flakeRefS : getFlakesForCompletion()) { + auto flakeRef = parseFlakeRefWithFragment(expandTilde(flakeRefS), absPath(".")).first; + auto flake = flake::getFlake(*evalState, flakeRef, true); + for (auto & input : flake.inputs) + 
if (hasPrefix(input.first, prefix)) + completions->add(input.first); + } +} + +void MixFlakeOptions::completionHook() +{ + if (auto & prefix = needsFlakeInputCompletion) + completeFlakeInput(*prefix); +} + +SourceExprCommand::SourceExprCommand(bool supportReadOnlyMode) { addFlag({ .longName = "file", .shortName = 'f', - .description = "Interpret installables as attribute paths relative to the Nix expression stored in *file*.", + .description = + "Interpret installables as attribute paths relative to the Nix expression stored in *file*. " + "If *file* is the character -, then a Nix expression will be read from standard input. " + "Implies `--impure`.", .category = installablesCategory, .labels = {"file"}, .handler = {&file}, @@ -154,6 +172,17 @@ SourceExprCommand::SourceExprCommand() .category = installablesCategory, .handler = {&operateOn, OperateOn::Derivation}, }); + + if (supportReadOnlyMode) { + addFlag({ + .longName = "read-only", + .description = + "Do not instantiate each evaluated derivation. " + "This improves performance, but can cause errors when accessing " + "store paths of derivations during evaluation.", + .handler = {&readOnlyMode, true}, + }); + } } Strings SourceExprCommand::getDefaultFlakeAttrPaths() @@ -179,6 +208,8 @@ Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes() void SourceExprCommand::completeInstallable(std::string_view prefix) { if (file) { + completionType = ctAttrs; + evalSettings.pureEval = false; auto state = getEvalState(); Expr *e = state->parseExprFromFile( @@ -207,13 +238,14 @@ void SourceExprCommand::completeInstallable(std::string_view prefix) Value v2; state->autoCallFunction(*autoArgs, v1, v2); - completionType = ctAttrs; - if (v2.type() == nAttrs) { for (auto & i : *v2.attrs) { - std::string name = i.name; + std::string name = state->symbols[i.name]; if (name.find(searchWord) == 0) { - completions->add(i.name); + if (prefix_ == "") + completions->add(name); + else + completions->add(prefix_ + "." + name); } } } @@ -241,10 +273,11 @@ void completeFlakeRefWithFragment( if (hash == std::string::npos) { completeFlakeRef(evalState->store, prefix); } else { + completionType = ctAttrs; + auto fragment = prefix.substr(hash + 1); auto flakeRefS = std::string(prefix.substr(0, hash)); - // FIXME: do tilde expansion. - auto flakeRef = parseFlakeRef(flakeRefS, absPath(".")); + auto flakeRef = parseFlakeRef(expandTilde(flakeRefS), absPath(".")); auto evalCache = openEvalCache(*evalState, std::make_shared<flake::LockedFlake>(lockFlake(*evalState, flakeRef, lockFlags))); @@ -256,8 +289,6 @@ void completeFlakeRefWithFragment( flake. */ attrPathPrefixes.push_back(""); - completionType = ctAttrs; - for (auto & attrPathPrefixS : attrPathPrefixes) { auto attrPathPrefix = parseAttrPath(*evalState, attrPathPrefixS); auto attrPathS = attrPathPrefixS + std::string(fragment); @@ -265,19 +296,19 @@ void completeFlakeRefWithFragment( std::string lastAttr; if (!attrPath.empty() && !hasSuffix(attrPathS, ".")) { - lastAttr = attrPath.back(); + lastAttr = evalState->symbols[attrPath.back()]; attrPath.pop_back(); } auto attr = root->findAlongAttrPath(attrPath); if (!attr) continue; - for (auto & attr2 : attr->getAttrs()) { - if (hasPrefix(attr2, lastAttr)) { - auto attrPath2 = attr->getAttrPath(attr2); + for (auto & attr2 : (*attr)->getAttrs()) { + if (hasPrefix(evalState->symbols[attr2], lastAttr)) { + auto attrPath2 = (*attr)->getAttrPath(attr2); /* Strip the attrpath prefix. 
*/ attrPath2.erase(attrPath2.begin(), attrPath2.begin() + attrPathPrefix.size()); - completions->add(flakeRefS + "#" + concatStringsSep(".", attrPath2)); + completions->add(flakeRefS + "#" + concatStringsSep(".", evalState->symbols.resolve(attrPath2))); } } } @@ -331,16 +362,16 @@ DerivedPath Installable::toDerivedPath() return std::move(buildables[0]); } -std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>> +std::vector<ref<eval_cache::AttrCursor>> Installable::getCursors(EvalState & state) { auto evalCache = std::make_shared<nix::eval_cache::EvalCache>(std::nullopt, state, [&]() { return toValue(state).first; }); - return {{evalCache->getRoot(), ""}}; + return {evalCache->getRoot()}; } -std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string> +ref<eval_cache::AttrCursor> Installable::getCursor(EvalState & state) { auto cursors = getCursors(state); @@ -414,10 +445,8 @@ DerivedPaths InstallableValue::toDerivedPaths() // Group by derivation, helps with .all in particular for (auto & drv : toDerivations()) { - auto outputName = drv.outputName; - if (outputName == "") - throw Error("derivation '%s' lacks an 'outputName' attribute", state->store->printStorePath(drv.drvPath)); - drvsToOutputs[drv.drvPath].insert(outputName); + for (auto & outputName : drv.outputsToInstall) + drvsToOutputs[drv.drvPath].insert(outputName); drvsToCopy.insert(drv.drvPath); } @@ -440,14 +469,24 @@ struct InstallableAttrPath : InstallableValue SourceExprCommand & cmd; RootValue v; std::string attrPath; - - InstallableAttrPath(ref<EvalState> state, SourceExprCommand & cmd, Value * v, const std::string & attrPath) - : InstallableValue(state), cmd(cmd), v(allocRootValue(v)), attrPath(attrPath) + OutputsSpec outputsSpec; + + InstallableAttrPath( + ref<EvalState> state, + SourceExprCommand & cmd, + Value * v, + const std::string & attrPath, + OutputsSpec outputsSpec) + : InstallableValue(state) + , cmd(cmd) + , v(allocRootValue(v)) + , attrPath(attrPath) + , outputsSpec(std::move(outputsSpec)) { } std::string what() const override { return attrPath; } - std::pair<Value *, Pos> toValue(EvalState & state) override + std::pair<Value *, PosIdx> toValue(EvalState & state) override { auto [vRes, pos] = findAlongAttrPath(state, attrPath, *cmd.getAutoArgs(state), **v); state.forceValue(*vRes, pos); @@ -468,10 +507,21 @@ std::vector<InstallableValue::DerivationInfo> InstallableAttrPath::toDerivations std::vector<DerivationInfo> res; for (auto & drvInfo : drvInfos) { - res.push_back({ - state->store->parseStorePath(drvInfo.queryDrvPath()), - state->store->maybeParseStorePath(drvInfo.queryOutPath()), - drvInfo.queryOutputName() + auto drvPath = drvInfo.queryDrvPath(); + if (!drvPath) + throw Error("'%s' is not a derivation", what()); + + std::set<std::string> outputsToInstall; + + if (auto outputNames = std::get_if<OutputNames>(&outputsSpec)) + outputsToInstall = *outputNames; + else + for (auto & output : drvInfo.queryOutputs(false, std::get_if<DefaultOutputs>(&outputsSpec))) + outputsToInstall.insert(output.first); + + res.push_back(DerivationInfo { + .drvPath = *drvPath, + .outputsToInstall = std::move(outputsToInstall) }); } @@ -549,6 +599,7 @@ InstallableFlake::InstallableFlake( ref<EvalState> state, FlakeRef && flakeRef, std::string_view fragment, + OutputsSpec outputsSpec, Strings attrPaths, Strings prefixes, const flake::LockFlags & lockFlags) @@ -556,6 +607,7 @@ InstallableFlake::InstallableFlake( flakeRef(flakeRef), attrPaths(fragment == "" ? 
attrPaths : Strings{(std::string) fragment}), prefixes(fragment == "" ? Strings{} : prefixes), + outputsSpec(std::move(outputsSpec)), lockFlags(lockFlags) { if (cmd && cmd->getAutoArgs(*state)->size()) @@ -564,37 +616,55 @@ InstallableFlake::InstallableFlake( std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableFlake::toDerivation() { - auto lockedFlake = getLockedFlake(); + Activity act(*logger, lvlTalkative, actUnknown, fmt("evaluating derivation '%s'", what())); - auto cache = openEvalCache(*state, lockedFlake); - auto root = cache->getRoot(); + auto attr = getCursor(*state); - for (auto & attrPath : getActualAttrPaths()) { - debug("trying flake output attribute '%s'", attrPath); + auto attrPath = attr->getAttrPathStr(); - auto attr = root->findAlongAttrPath( - parseAttrPath(*state, attrPath), - true - ); + if (!attr->isDerivation()) + throw Error("flake output attribute '%s' is not a derivation", attrPath); - if (!attr) continue; + auto drvPath = attr->forceDerivation(); - if (!attr->isDerivation()) - throw Error("flake output attribute '%s' is not a derivation", attrPath); + std::set<std::string> outputsToInstall; + std::optional<NixInt> priority; - auto drvPath = attr->forceDerivation(); + if (auto aOutputSpecified = attr->maybeGetAttr(state->sOutputSpecified)) { + if (aOutputSpecified->getBool()) { + if (auto aOutputName = attr->maybeGetAttr("outputName")) + outputsToInstall = { aOutputName->getString() }; + } + } - auto drvInfo = DerivationInfo{ - std::move(drvPath), - state->store->maybeParseStorePath(attr->getAttr(state->sOutPath)->getString()), - attr->getAttr(state->sOutputName)->getString() - }; + else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) { + if (auto aOutputsToInstall = aMeta->maybeGetAttr("outputsToInstall")) + for (auto & s : aOutputsToInstall->getListOfStrings()) + outputsToInstall.insert(s); + if (auto aPriority = aMeta->maybeGetAttr("priority")) + priority = aPriority->getInt(); + } - return {attrPath, lockedFlake->flake.lockedRef, std::move(drvInfo)}; + if (outputsToInstall.empty() || std::get_if<AllOutputs>(&outputsSpec)) { + outputsToInstall.clear(); + if (auto aOutputs = attr->maybeGetAttr(state->sOutputs)) + for (auto & s : aOutputs->getListOfStrings()) + outputsToInstall.insert(s); } - throw Error("flake '%s' does not provide attribute %s", - flakeRef, showAttrPaths(getActualAttrPaths())); + if (outputsToInstall.empty()) + outputsToInstall.insert("out"); + + if (auto outputNames = std::get_if<OutputNames>(&outputsSpec)) + outputsToInstall = *outputNames; + + auto drvInfo = DerivationInfo { + .drvPath = std::move(drvPath), + .outputsToInstall = std::move(outputsToInstall), + .priority = priority, + }; + + return {attrPath, getLockedFlake()->flake.lockedRef, std::move(drvInfo)}; } std::vector<InstallableValue::DerivationInfo> InstallableFlake::toDerivations() @@ -604,28 +674,12 @@ std::vector<InstallableValue::DerivationInfo> InstallableFlake::toDerivations() return res; } -std::pair<Value *, Pos> InstallableFlake::toValue(EvalState & state) +std::pair<Value *, PosIdx> InstallableFlake::toValue(EvalState & state) { - auto lockedFlake = getLockedFlake(); - - auto vOutputs = getFlakeOutputs(state, *lockedFlake); - - auto emptyArgs = state.allocBindings(0); - - for (auto & attrPath : getActualAttrPaths()) { - try { - auto [v, pos] = findAlongAttrPath(state, attrPath, *emptyArgs, *vOutputs); - state.forceValue(*v, pos); - return {v, pos}; - } catch (AttrPathNotFound & e) { - } - } - - throw Error("flake '%s' does not provide 
attribute %s", - flakeRef, showAttrPaths(getActualAttrPaths())); + return {&getCursor(state)->forceValue(), noPos}; } -std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>> +std::vector<ref<eval_cache::AttrCursor>> InstallableFlake::getCursors(EvalState & state) { auto evalCache = openEvalCache(state, @@ -633,21 +687,55 @@ InstallableFlake::getCursors(EvalState & state) auto root = evalCache->getRoot(); - std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>> res; + std::vector<ref<eval_cache::AttrCursor>> res; for (auto & attrPath : getActualAttrPaths()) { auto attr = root->findAlongAttrPath(parseAttrPath(state, attrPath)); - if (attr) res.push_back({attr, attrPath}); + if (attr) res.push_back(ref(*attr)); } return res; } +ref<eval_cache::AttrCursor> InstallableFlake::getCursor(EvalState & state) +{ + auto lockedFlake = getLockedFlake(); + + auto cache = openEvalCache(state, lockedFlake); + auto root = cache->getRoot(); + + Suggestions suggestions; + + auto attrPaths = getActualAttrPaths(); + + for (auto & attrPath : attrPaths) { + debug("trying flake output attribute '%s'", attrPath); + + auto attrOrSuggestions = root->findAlongAttrPath( + parseAttrPath(state, attrPath), + true + ); + + if (!attrOrSuggestions) { + suggestions += attrOrSuggestions.getSuggestions(); + continue; + } + + return *attrOrSuggestions; + } + + throw Error( + suggestions, + "flake '%s' does not provide attribute %s", + flakeRef, + showAttrPaths(attrPaths)); +} + std::shared_ptr<flake::LockedFlake> InstallableFlake::getLockedFlake() const { - flake::LockFlags lockFlagsApplyConfig = lockFlags; - lockFlagsApplyConfig.applyNixConfig = true; if (!_lockedFlake) { + flake::LockFlags lockFlagsApplyConfig = lockFlags; + lockFlagsApplyConfig.applyNixConfig = true; _lockedFlake = std::make_shared<flake::LockedFlake>(lockFlake(*state, flakeRef, lockFlagsApplyConfig)); } return _lockedFlake; @@ -672,6 +760,10 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables( { std::vector<std::shared_ptr<Installable>> result; + if (readOnlyMode) { + settings.readOnlyMode = true; + } + if (file || expr) { if (file && expr) throw UsageError("'--file' and '--expr' are exclusive"); @@ -682,15 +774,24 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables( auto state = getEvalState(); auto vFile = state->allocValue(); - if (file) + if (file == "-") { + auto e = state->parseStdin(); + state->eval(e, *vFile); + } else if (file) state->evalFile(lookupFileArg(*state, *file), *vFile); else { auto e = state->parseExprFromString(*expr, absPath(".")); state->eval(e, *vFile); } - for (auto & s : ss) - result.push_back(std::make_shared<InstallableAttrPath>(state, *this, vFile, s == "." ? "" : s)); + for (auto & s : ss) { + auto [prefix, outputsSpec] = parseOutputsSpec(s); + result.push_back( + std::make_shared<InstallableAttrPath>( + state, *this, vFile, + prefix == "." ? 
"" : prefix, + outputsSpec)); + } } else { @@ -709,12 +810,13 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables( } try { - auto [flakeRef, fragment] = parseFlakeRefWithFragment(s, absPath(".")); + auto [flakeRef, fragment, outputsSpec] = parseFlakeRefWithFragmentAndOutputsSpec(s, absPath(".")); result.push_back(std::make_shared<InstallableFlake>( this, getEvalState(), std::move(flakeRef), fragment, + outputsSpec, getDefaultFlakeAttrPaths(), getDefaultFlakeAttrPathPrefixes(), lockFlags)); @@ -738,56 +840,20 @@ std::shared_ptr<Installable> SourceExprCommand::parseInstallable( return installables.front(); } -BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPaths & hopefullyBuiltPaths) +BuiltPaths Installable::build( + ref<Store> evalStore, + ref<Store> store, + Realise mode, + const std::vector<std::shared_ptr<Installable>> & installables, + BuildMode bMode) { BuiltPaths res; - for (const auto & b : hopefullyBuiltPaths) - std::visit( - overloaded{ - [&](const DerivedPath::Opaque & bo) { - res.push_back(BuiltPath::Opaque{bo.path}); - }, - [&](const DerivedPath::Built & bfd) { - OutputPathMap outputs; - auto drv = evalStore->readDerivation(bfd.drvPath); - auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive - auto drvOutputs = drv.outputsAndOptPaths(*store); - for (auto & output : bfd.outputs) { - if (!outputHashes.count(output)) - throw Error( - "the derivation '%s' doesn't have an output named '%s'", - store->printStorePath(bfd.drvPath), output); - if (settings.isExperimentalFeatureEnabled( - Xp::CaDerivations)) { - auto outputId = - DrvOutput{outputHashes.at(output), output}; - auto realisation = - store->queryRealisation(outputId); - if (!realisation) - throw Error( - "cannot operate on an output of unbuilt " - "content-addressed derivation '%s'", - outputId.to_string()); - outputs.insert_or_assign( - output, realisation->outPath); - } else { - // If ca-derivations isn't enabled, assume that - // the output path is statically known. 
- assert(drvOutputs.count(output)); - assert(drvOutputs.at(output).second); - outputs.insert_or_assign( - output, *drvOutputs.at(output).second); - } - } - res.push_back(BuiltPath::Built{bfd.drvPath, outputs}); - }, - }, - b.raw()); - + for (auto & [_, builtPath] : build2(evalStore, store, mode, installables, bMode)) + res.push_back(builtPath); return res; } -BuiltPaths build( +std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::build2( ref<Store> evalStore, ref<Store> store, Realise mode, @@ -798,21 +864,101 @@ BuiltPaths build( settings.readOnlyMode = true; std::vector<DerivedPath> pathsToBuild; + std::map<DerivedPath, std::vector<std::shared_ptr<Installable>>> backmap; for (auto & i : installables) { - auto b = i->toDerivedPaths(); - pathsToBuild.insert(pathsToBuild.end(), b.begin(), b.end()); + for (auto b : i->toDerivedPaths()) { + pathsToBuild.push_back(b); + backmap[b].push_back(i); + } } - if (mode == Realise::Nothing || mode == Realise::Derivation) + std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> res; + + switch (mode) { + + case Realise::Nothing: + case Realise::Derivation: printMissing(store, pathsToBuild, lvlError); - else if (mode == Realise::Outputs) - store->buildPaths(pathsToBuild, bMode, evalStore); - return getBuiltPaths(evalStore, store, pathsToBuild); + for (auto & path : pathsToBuild) { + for (auto & installable : backmap[path]) { + std::visit(overloaded { + [&](const DerivedPath::Built & bfd) { + OutputPathMap outputs; + auto drv = evalStore->readDerivation(bfd.drvPath); + auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive + auto drvOutputs = drv.outputsAndOptPaths(*store); + for (auto & output : bfd.outputs) { + auto outputHash = get(outputHashes, output); + if (!outputHash) + throw Error( + "the derivation '%s' doesn't have an output named '%s'", + store->printStorePath(bfd.drvPath), output); + if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) { + DrvOutput outputId { *outputHash, output }; + auto realisation = store->queryRealisation(outputId); + if (!realisation) + throw Error( + "cannot operate on an output of the " + "unbuilt derivation '%s'", + outputId.to_string()); + outputs.insert_or_assign(output, realisation->outPath); + } else { + // If ca-derivations isn't enabled, assume that + // the output path is statically known. 
+ auto drvOutput = get(drvOutputs, output); + assert(drvOutput); + assert(drvOutput->second); + outputs.insert_or_assign( + output, *drvOutput->second); + } + } + res.push_back({installable, BuiltPath::Built { bfd.drvPath, outputs }}); + }, + [&](const DerivedPath::Opaque & bo) { + res.push_back({installable, BuiltPath::Opaque { bo.path }}); + }, + }, path.raw()); + } + } + + break; + + case Realise::Outputs: { + if (settings.printMissing) + printMissing(store, pathsToBuild, lvlInfo); + + for (auto & buildResult : store->buildPathsWithResults(pathsToBuild, bMode, evalStore)) { + if (!buildResult.success()) + buildResult.rethrow(); + + for (auto & installable : backmap[buildResult.path]) { + std::visit(overloaded { + [&](const DerivedPath::Built & bfd) { + std::map<std::string, StorePath> outputs; + for (auto & path : buildResult.builtOutputs) + outputs.emplace(path.first.outputName, path.second.outPath); + res.push_back({installable, BuiltPath::Built { bfd.drvPath, outputs }}); + }, + [&](const DerivedPath::Opaque & bo) { + res.push_back({installable, BuiltPath::Opaque { bo.path }}); + }, + }, buildResult.path.raw()); + } + } + + break; + } + + default: + assert(false); + } + + return res; } -BuiltPaths toBuiltPaths( +BuiltPaths Installable::toBuiltPaths( ref<Store> evalStore, ref<Store> store, Realise mode, @@ -820,19 +966,19 @@ BuiltPaths toBuiltPaths( const std::vector<std::shared_ptr<Installable>> & installables) { if (operateOn == OperateOn::Output) - return build(evalStore, store, mode, installables); + return Installable::build(evalStore, store, mode, installables); else { if (mode == Realise::Nothing) settings.readOnlyMode = true; BuiltPaths res; - for (auto & drvPath : toDerivations(store, installables, true)) + for (auto & drvPath : Installable::toDerivations(store, installables, true)) res.push_back(BuiltPath::Opaque{drvPath}); return res; } } -StorePathSet toStorePaths( +StorePathSet Installable::toStorePaths( ref<Store> evalStore, ref<Store> store, Realise mode, OperateOn operateOn, @@ -846,7 +992,7 @@ StorePathSet toStorePaths( return outPaths; } -StorePath toStorePath( +StorePath Installable::toStorePath( ref<Store> evalStore, ref<Store> store, Realise mode, OperateOn operateOn, @@ -860,7 +1006,7 @@ StorePath toStorePath( return *paths.begin(); } -StorePathSet toDerivations( +StorePathSet Installable::toDerivations( ref<Store> store, const std::vector<std::shared_ptr<Installable>> & installables, bool useDeriver) @@ -896,24 +1042,30 @@ InstallablesCommand::InstallablesCommand() void InstallablesCommand::prepare() { + installables = load(); +} + +Installables InstallablesCommand::load() { + Installables installables; if (_installables.empty() && useDefaultInstallables()) - // FIXME: commands like "nix install" should not have a + // FIXME: commands like "nix profile install" should not have a // default, probably. 
_installables.push_back("."); - installables = parseInstallables(getStore(), _installables); + return parseInstallables(getStore(), _installables); } -std::optional<FlakeRef> InstallablesCommand::getFlakeRefForCompletion() +std::vector<std::string> InstallablesCommand::getFlakesForCompletion() { if (_installables.empty()) { if (useDefaultInstallables()) - return parseFlakeRef(".", absPath(".")); + return {"."}; return {}; } - return parseFlakeRef(_installables.front(), absPath(".")); + return _installables; } -InstallableCommand::InstallableCommand() +InstallableCommand::InstallableCommand(bool supportReadOnlyMode) + : SourceExprCommand(supportReadOnlyMode) { expectArgs({ .label = "installable", diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh index 3d2563e4b..948f78919 100644 --- a/src/libcmd/installables.hh +++ b/src/libcmd/installables.hh @@ -5,6 +5,7 @@ #include "path-with-outputs.hh" #include "derived-path.hh" #include "eval.hh" +#include "store-api.hh" #include "flake/flake.hh" #include <optional> @@ -29,6 +30,27 @@ struct UnresolvedApp App resolve(ref<Store> evalStore, ref<Store> store); }; +enum class Realise { + /* Build the derivation. Postcondition: the + derivation outputs exist. */ + Outputs, + /* Don't build the derivation. Postcondition: the store derivation + exists. */ + Derivation, + /* Evaluate in dry-run mode. Postcondition: nothing. */ + // FIXME: currently unused, but could be revived if we can + // evaluate derivations in-memory. + Nothing +}; + +/* How to handle derivations in commands that operate on store paths. */ +enum class OperateOn { + /* Operate on the output path. */ + Output, + /* Operate on the .drv path. */ + Derivation +}; + struct Installable { virtual ~Installable() { } @@ -46,7 +68,7 @@ struct Installable UnresolvedApp toApp(EvalState & state); - virtual std::pair<Value *, Pos> toValue(EvalState & state) + virtual std::pair<Value *, PosIdx> toValue(EvalState & state) { throw Error("argument '%s' cannot be evaluated", what()); } @@ -58,18 +80,60 @@ struct Installable return {}; } - virtual std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>> + virtual std::vector<ref<eval_cache::AttrCursor>> getCursors(EvalState & state); - std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string> + virtual ref<eval_cache::AttrCursor> getCursor(EvalState & state); virtual FlakeRef nixpkgsFlakeRef() const { return FlakeRef::fromAttrs({{"type","indirect"}, {"id", "nixpkgs"}}); } + + static BuiltPaths build( + ref<Store> evalStore, + ref<Store> store, + Realise mode, + const std::vector<std::shared_ptr<Installable>> & installables, + BuildMode bMode = bmNormal); + + static std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> build2( + ref<Store> evalStore, + ref<Store> store, + Realise mode, + const std::vector<std::shared_ptr<Installable>> & installables, + BuildMode bMode = bmNormal); + + static std::set<StorePath> toStorePaths( + ref<Store> evalStore, + ref<Store> store, + Realise mode, + OperateOn operateOn, + const std::vector<std::shared_ptr<Installable>> & installables); + + static StorePath toStorePath( + ref<Store> evalStore, + ref<Store> store, + Realise mode, + OperateOn operateOn, + std::shared_ptr<Installable> installable); + + static std::set<StorePath> toDerivations( + ref<Store> store, + const std::vector<std::shared_ptr<Installable>> & installables, + bool useDeriver = false); + + static BuiltPaths toBuiltPaths( + ref<Store> evalStore, + ref<Store> store, + Realise mode, + OperateOn operateOn, 
+ const std::vector<std::shared_ptr<Installable>> & installables); }; +typedef std::vector<std::shared_ptr<Installable>> Installables; + struct InstallableValue : Installable { ref<EvalState> state; @@ -79,8 +143,8 @@ struct InstallableValue : Installable struct DerivationInfo { StorePath drvPath; - std::optional<StorePath> outPath; - std::string outputName; + std::set<std::string> outputsToInstall; + std::optional<NixInt> priority; }; virtual std::vector<DerivationInfo> toDerivations() = 0; @@ -95,6 +159,7 @@ struct InstallableFlake : InstallableValue FlakeRef flakeRef; Strings attrPaths; Strings prefixes; + OutputsSpec outputsSpec; const flake::LockFlags & lockFlags; mutable std::shared_ptr<flake::LockedFlake> _lockedFlake; @@ -103,6 +168,7 @@ struct InstallableFlake : InstallableValue ref<EvalState> state, FlakeRef && flakeRef, std::string_view fragment, + OutputsSpec outputsSpec, Strings attrPaths, Strings prefixes, const flake::LockFlags & lockFlags); @@ -117,11 +183,17 @@ struct InstallableFlake : InstallableValue std::vector<DerivationInfo> toDerivations() override; - std::pair<Value *, Pos> toValue(EvalState & state) override; + std::pair<Value *, PosIdx> toValue(EvalState & state) override; - std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>> + /* Get a cursor to every attrpath in getActualAttrPaths() that + exists. */ + std::vector<ref<eval_cache::AttrCursor>> getCursors(EvalState & state) override; + /* Get a cursor to the first attrpath in getActualAttrPaths() that + exists, or throw an exception with suggestions if none exists. */ + ref<eval_cache::AttrCursor> getCursor(EvalState & state) override; + std::shared_ptr<flake::LockedFlake> getLockedFlake() const; FlakeRef nixpkgsFlakeRef() const override; diff --git a/src/libcmd/local.mk b/src/libcmd/local.mk index 7a2f83cc7..3a4de6bcb 100644 --- a/src/libcmd/local.mk +++ b/src/libcmd/local.mk @@ -6,9 +6,9 @@ libcmd_DIR := $(d) libcmd_SOURCES := $(wildcard $(d)/*.cc) -libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers +libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers -I src/nix -libcmd_LDFLAGS += $(LOWDOWN_LIBS) -pthread +libcmd_LDFLAGS = $(EDITLINE_LIBS) -llowdown -pthread libcmd_LIBS = libstore libutil libexpr libmain libfetchers diff --git a/src/libcmd/markdown.cc b/src/libcmd/markdown.cc index 29bb4d31e..668a07763 100644 --- a/src/libcmd/markdown.cc +++ b/src/libcmd/markdown.cc @@ -9,14 +9,16 @@ namespace nix { std::string renderMarkdownToTerminal(std::string_view markdown) { + int windowWidth = getWindowSize().second; + struct lowdown_opts opts { .type = LOWDOWN_TERM, .maxdepth = 20, - .cols = std::max(getWindowSize().second, (unsigned short) 80), + .cols = (size_t) std::max(windowWidth - 5, 60), .hmargin = 0, .vmargin = 0, .feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES, - .oflags = 0, + .oflags = LOWDOWN_TERM_NOLINK, }; auto doc = lowdown_doc_new(&opts); diff --git a/src/nix/repl.cc b/src/libcmd/repl.cc index 731337004..df8932087 100644 --- a/src/nix/repl.cc +++ b/src/libcmd/repl.cc @@ -22,9 +22,11 @@ extern "C" { #include "ansicolor.hh" #include "shared.hh" #include "eval.hh" +#include "eval-cache.hh" #include "eval-inline.hh" #include "attr-path.hh" #include "store-api.hh" +#include "log-store.hh" #include "common-eval-args.hh" #include "get-drvs.hh" #include "derivations.hh" @@ -32,6 +34,8 @@ extern "C" { #include "command.hh" #include "finally.hh" #include 
"markdown.hh" +#include "local-fs-store.hh" +#include "progress-bar.hh" #if HAVE_BOEHMGC #define GC_INCLUDE_NEW @@ -46,38 +50,46 @@ struct NixRepl #endif { std::string curDir; - std::unique_ptr<EvalState> state; + ref<EvalState> state; Bindings * autoArgs; + size_t debugTraceIndex; + Strings loadedFiles; + typedef std::vector<std::pair<Value*,std::string>> AnnotatedValues; + std::function<AnnotatedValues()> getValues; const static int envSize = 32768; - StaticEnv staticEnv; + std::shared_ptr<StaticEnv> staticEnv; Env * env; int displ; StringSet varNames; const Path historyFile; - NixRepl(const Strings & searchPath, nix::ref<Store> store); + NixRepl(const Strings & searchPath, nix::ref<Store> store,ref<EvalState> state, + std::function<AnnotatedValues()> getValues); ~NixRepl(); - void mainLoop(const std::vector<std::string> & files); + void mainLoop(); StringSet completePrefix(const std::string & prefix); - bool getLine(std::string & input, const std::string &prompt); + bool getLine(std::string & input, const std::string & prompt); StorePath getDerivationPath(Value & v); bool processLine(std::string line); + void loadFile(const Path & path); void loadFlake(const std::string & flakeRef); void initEnv(); + void loadFiles(); void reloadFiles(); void addAttrsToScope(Value & attrs); - void addVarToScope(const Symbol & name, Value & v); + void addVarToScope(const Symbol name, Value & v); Expr * parseString(std::string s); void evalString(std::string s, Value & v); + void loadDebugTraceEnv(DebugTrace & dt); typedef std::set<Value *> ValuesSeen; - std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth); - std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth, ValuesSeen & seen); + std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth); + std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth, ValuesSeen & seen); }; @@ -90,9 +102,12 @@ std::string removeWhitespace(std::string s) } -NixRepl::NixRepl(const Strings & searchPath, nix::ref<Store> store) - : state(std::make_unique<EvalState>(searchPath, store)) - , staticEnv(false, &state->staticBaseEnv) +NixRepl::NixRepl(const Strings & searchPath, nix::ref<Store> store, ref<EvalState> state, + std::function<NixRepl::AnnotatedValues()> getValues) + : state(state) + , debugTraceIndex(0) + , getValues(getValues) + , staticEnv(new StaticEnv(false, state->staticBaseEnv.get())) , historyFile(getDataDir() + "/nix/repl-history") { curDir = absPath("."); @@ -104,23 +119,20 @@ NixRepl::~NixRepl() write_history(historyFile.c_str()); } -std::string runNix(Path program, const Strings & args, +void runNix(Path program, const Strings & args, const std::optional<std::string> & input = {}) { auto subprocessEnv = getEnv(); subprocessEnv["NIX_CONFIG"] = globalConfig.toKeyValue(); - auto res = runProgram(RunOptions { + runProgram2(RunOptions { .program = settings.nixBinDir+ "/" + program, .args = args, .environment = subprocessEnv, .input = input, }); - if (!statusOk(res.first)) - throw ExecError(res.first, fmt("program '%1%' %2%", program, statusToString(res.first))); - - return res.second; + return; } static NixRepl * curRepl; // ugly @@ -196,20 +208,45 @@ namespace { } } -void NixRepl::mainLoop(const std::vector<std::string> & files) +static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positions, const DebugTrace & dt) +{ + if (dt.isError) + out << ANSI_RED "error: " << ANSI_NORMAL; + out << dt.hint.str() << "\n"; + + // prefer direct pos, but if noPos 
then try the expr. + auto pos = *dt.pos + ? *dt.pos + : positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]; + + if (pos) { + printAtPos(pos, out); + + auto loc = getCodeLines(pos); + if (loc.has_value()) { + out << "\n"; + printCodeLines(out, "", pos, *loc); + out << "\n"; + } + } + + return out; +} + +void NixRepl::mainLoop() { std::string error = ANSI_RED "error:" ANSI_NORMAL " "; notice("Welcome to Nix " + nixVersion + ". Type :? for help.\n"); - for (auto & i : files) - loadedFiles.push_back(i); - - reloadFiles(); - if (!loadedFiles.empty()) notice(""); + loadFiles(); // Allow nix-repl specific settings in .inputrc rl_readline_name = "nix-repl"; - createDirs(dirOf(historyFile)); + try { + createDirs(dirOf(historyFile)); + } catch (SysError & e) { + logWarning(e.info()); + } #ifndef READLINE el_hist_size = 1000; #endif @@ -220,14 +257,21 @@ void NixRepl::mainLoop(const std::vector<std::string> & files) rl_set_list_possib_func(listPossibleCallback); #endif + /* Stop the progress bar because it interferes with the display of + the repl. */ + stopProgressBar(); + std::string input; while (true) { // When continuing input from previous lines, don't print a prompt, just align to the same // number of chars as the prompt. - if (!getLine(input, input.empty() ? "nix-repl> " : " ")) + if (!getLine(input, input.empty() ? "nix-repl> " : " ")) { + // ctrl-D should exit the debugger. + state->debugStop = false; + state->debugQuit = true; break; - + } try { if (!removeWhitespace(input).empty() && !processLine(input)) return; } catch (ParseError & e) { @@ -238,6 +282,14 @@ void NixRepl::mainLoop(const std::vector<std::string> & files) } else { printMsg(lvlError, e.msg()); } + } catch (EvalError & e) { + // in debugger mode, an EvalError should trigger another repl session. + // when that session returns the exception will land here. No need to show it again; + // show the error for this repl session instead. + if (state->debugRepl && !state->debugTraces.empty()) + showDebugTrace(std::cout, state->positions, state->debugTraces.front()); + else + printMsg(lvlError, e.msg()); } catch (Error & e) { printMsg(lvlError, e.msg()); } catch (Interrupted & e) { @@ -345,9 +397,9 @@ StringSet NixRepl::completePrefix(const std::string & prefix) state->forceAttrs(v, noPos); for (auto & i : *v.attrs) { - std::string name = i.name; + std::string_view name = state->symbols[i.name]; if (name.substr(0, cur2.size()) != cur2) continue; - completions.insert(prev + expr + "." 
+ name); + completions.insert(concatStrings(prev, expr, ".", name)); } } catch (ParseError & e) { @@ -384,18 +436,31 @@ StorePath NixRepl::getDerivationPath(Value & v) { auto drvInfo = getDerivation(*state, v, false); if (!drvInfo) throw Error("expression does not evaluate to a derivation, so I can't build it"); - Path drvPathRaw = drvInfo->queryDrvPath(); - if (drvPathRaw == "") - throw Error("expression did not evaluate to a valid derivation (no drv path)"); - StorePath drvPath = state->store->parseStorePath(drvPathRaw); - if (!state->store->isValidPath(drvPath)) - throw Error("expression did not evaluate to a valid derivation (invalid drv path)"); - return drvPath; + auto drvPath = drvInfo->queryDrvPath(); + if (!drvPath) + throw Error("expression did not evaluate to a valid derivation (no 'drvPath' attribute)"); + if (!state->store->isValidPath(*drvPath)) + throw Error("expression evaluated to invalid derivation '%s'", state->store->printStorePath(*drvPath)); + return *drvPath; } +void NixRepl::loadDebugTraceEnv(DebugTrace & dt) +{ + initEnv(); + + auto se = state->getStaticEnv(dt.expr); + if (se) { + auto vm = mapStaticEnvBindings(state->symbols, *se.get(), dt.env); + + // add staticenv vars. + for (auto & [name, value] : *(vm.get())) + addVarToScope(state->symbols.create(name), *value); + } +} bool NixRepl::processLine(std::string line) { + line = trim(line); if (line == "") return true; _isInterrupted = false; @@ -418,7 +483,8 @@ bool NixRepl::processLine(std::string line) << " <expr> Evaluate and print expression\n" << " <x> = <expr> Bind expression to variable\n" << " :a <expr> Add attributes from resulting set to scope\n" - << " :b <expr> Build derivation\n" + << " :b <expr> Build a derivation\n" + << " :bl <expr> Build a derivation, creating GC roots in the working directory\n" << " :e <expr> Open package or function in $EDITOR\n" << " :i <expr> Build derivation, then install result into current profile\n" << " :l <path> Load Nix expression and add it to scope\n" @@ -426,12 +492,72 @@ bool NixRepl::processLine(std::string line) << " :p <expr> Evaluate and print expression recursively\n" << " :q Exit nix-repl\n" << " :r Reload all files\n" - << " :s <expr> Build dependencies of derivation, then start nix-shell\n" + << " :sh <expr> Build dependencies of derivation, then start nix-shell\n" << " :t <expr> Describe result of evaluation\n" << " :u <expr> Build derivation, then start nix-shell\n" << " :doc <expr> Show documentation of a builtin function\n" << " :log <expr> Show logs for a derivation\n" - << " :st [bool] Enable, disable or toggle showing traces for errors\n"; + << " :te [bool] Enable, disable or toggle showing traces for errors\n" + ; + if (state->debugRepl) { + std::cout + << "\n" + << " Debug mode commands\n" + << " :env Show env stack\n" + << " :bt Show trace stack\n" + << " :st Show current trace\n" + << " :st <idx> Change to another trace in the stack\n" + << " :c Go until end of program, exception, or builtins.break\n" + << " :s Go one step\n" + ; + } + + } + + else if (state->debugRepl && (command == ":bt" || command == ":backtrace")) { + for (const auto & [idx, i] : enumerate(state->debugTraces)) { + std::cout << "\n" << ANSI_BLUE << idx << ANSI_NORMAL << ": "; + showDebugTrace(std::cout, state->positions, i); + } + } + + else if (state->debugRepl && (command == ":env")) { + for (const auto & [idx, i] : enumerate(state->debugTraces)) { + if (idx == debugTraceIndex) { + printEnvBindings(*state, i.expr, i.env); + break; + } + } + } + + else if (state->debugRepl && 
(command == ":st")) { + try { + // change the DebugTrace index. + debugTraceIndex = stoi(arg); + } catch (...) { } + + for (const auto & [idx, i] : enumerate(state->debugTraces)) { + if (idx == debugTraceIndex) { + std::cout << "\n" << ANSI_BLUE << idx << ANSI_NORMAL << ": "; + showDebugTrace(std::cout, state->positions, i); + std::cout << std::endl; + printEnvBindings(*state, i.expr, i.env); + loadDebugTraceEnv(i); + break; + } + } + } + + else if (state->debugRepl && (command == ":s" || command == ":step")) { + // set flag to stop at next DebugTrace; exit repl. + state->debugStop = true; + return false; + } + + else if (state->debugRepl && (command == ":c" || command == ":continue")) { + // set flag to run to next breakpoint or end of program; exit repl. + state->debugStop = false; + return false; } else if (command == ":a" || command == ":add") { @@ -458,21 +584,23 @@ bool NixRepl::processLine(std::string line) Value v; evalString(arg, v); - Pos pos; - - if (v.type() == nPath || v.type() == nString) { - PathSet context; - auto filename = state->coerceToString(noPos, v, context); - pos.file = state->symbols.create(*filename); - } else if (v.isLambda()) { - pos = v.lambda.fun->pos; - } else { - // assume it's a derivation - pos = findPackageFilename(*state, v, arg); - } + const auto [file, line] = [&] () -> std::pair<std::string, uint32_t> { + if (v.type() == nPath || v.type() == nString) { + PathSet context; + auto filename = state->coerceToString(noPos, v, context).toOwned(); + state->symbols.create(filename); + return {filename, 0}; + } else if (v.isLambda()) { + auto pos = state->positions[v.lambda.fun->pos]; + return {pos.file, pos.line}; + } else { + // assume it's a derivation + return findPackageFilename(*state, v, arg); + } + }(); // Open in EDITOR - auto args = editorFor(pos); + auto args = editorFor(file, line); auto editor = args.front(); args.pop_front(); @@ -495,24 +623,32 @@ bool NixRepl::processLine(std::string line) Value v, f, result; evalString(arg, v); evalString("drv: (import <nixpkgs> {}).runCommand \"shell\" { buildInputs = [ drv ]; } \"\"", f); - state->callFunction(f, v, result, Pos()); + state->callFunction(f, v, result, PosIdx()); StorePath drvPath = getDerivationPath(result); runNix("nix-shell", {state->store->printStorePath(drvPath)}); } - else if (command == ":b" || command == ":i" || command == ":s" || command == ":log") { + else if (command == ":b" || command == ":bl" || command == ":i" || command == ":sh" || command == ":log") { Value v; evalString(arg, v); StorePath drvPath = getDerivationPath(v); Path drvPathRaw = state->store->printStorePath(drvPath); - if (command == ":b") { + if (command == ":b" || command == ":bl") { state->store->buildPaths({DerivedPath::Built{drvPath}}); auto drv = state->store->readDerivation(drvPath); logger->cout("\nThis derivation produced the following outputs:"); - for (auto & [outputName, outputPath] : state->store->queryDerivationOutputMap(drvPath)) - logger->cout(" %s -> %s", outputName, state->store->printStorePath(outputPath)); + for (auto & [outputName, outputPath] : state->store->queryDerivationOutputMap(drvPath)) { + auto localStore = state->store.dynamic_pointer_cast<LocalFSStore>(); + if (localStore && command == ":bl") { + std::string symlink = "repl-result-" + outputName; + localStore->addPermRoot(outputPath, absPath(symlink)); + logger->cout(" ./%s -> %s", symlink, state->store->printStorePath(outputPath)); + } else { + logger->cout(" %s -> %s", outputName, state->store->printStorePath(outputPath)); + } + } } 
else if (command == ":i") { runNix("nix-env", {"-i", drvPathRaw}); } else if (command == ":log") { @@ -527,9 +663,16 @@ bool NixRepl::processLine(std::string line) bool foundLog = false; RunPager pager; for (auto & sub : subs) { - auto log = sub->getBuildLog(drvPath); + auto * logSubP = dynamic_cast<LogStore *>(&*sub); + if (!logSubP) { + printInfo("Skipped '%s' which does not support retrieving build logs", sub->getUri()); + continue; + } + auto & logSub = *logSubP; + + auto log = logSub.getBuildLog(drvPath); if (log) { - printInfo("got build log for '%s' from '%s'", drvPathRaw, sub->getUri()); + printInfo("got build log for '%s' from '%s'", drvPathRaw, logSub.getUri()); logger->writeToStdout(*log); foundLog = true; break; @@ -547,8 +690,11 @@ bool NixRepl::processLine(std::string line) printValue(std::cout, v, 1000000000) << std::endl; } - else if (command == ":q" || command == ":quit") + else if (command == ":q" || command == ":quit") { + state->debugStop = false; + state->debugQuit = true; return false; + } else if (command == ":doc") { Value v; @@ -573,7 +719,7 @@ bool NixRepl::processLine(std::string line) throw Error("value does not have documentation"); } - else if (command == ":st" || command == ":show-trace") { + else if (command == ":te" || command == ":trace-enable") { if (arg == "false" || (arg == "" && loggerSettings.showTrace)) { std::cout << "not showing error traces\n"; loggerSettings.showTrace = false; @@ -610,7 +756,6 @@ bool NixRepl::processLine(std::string line) return true; } - void NixRepl::loadFile(const Path & path) { loadedFiles.remove(path); @@ -649,11 +794,11 @@ void NixRepl::initEnv() env = &state->allocEnv(envSize); env->up = &state->baseEnv; displ = 0; - staticEnv.vars.clear(); + staticEnv->vars.clear(); varNames.clear(); - for (auto & i : state->staticBaseEnv.vars) - varNames.insert(i.first); + for (auto & i : state->staticBaseEnv->vars) + varNames.emplace(state->symbols[i.first]); } @@ -661,16 +806,24 @@ void NixRepl::reloadFiles() { initEnv(); + loadFiles(); +} + + +void NixRepl::loadFiles() +{ Strings old = loadedFiles; loadedFiles.clear(); - bool first = true; for (auto & i : old) { - if (!first) notice(""); - first = false; notice("Loading '%1%'...", i); loadFile(i); } + + for (auto & [i, what] : getValues()) { + notice("Loading installable '%1%'...", what); + addAttrsToScope(*i); + } } @@ -681,26 +834,26 @@ void NixRepl::addAttrsToScope(Value & attrs) throw Error("environment full; cannot add more variables"); for (auto & i : *attrs.attrs) { - staticEnv.vars.emplace_back(i.name, displ); + staticEnv->vars.emplace_back(i.name, displ); env->values[displ++] = i.value; - varNames.insert((std::string) i.name); + varNames.emplace(state->symbols[i.name]); } - staticEnv.sort(); - staticEnv.deduplicate(); + staticEnv->sort(); + staticEnv->deduplicate(); notice("Added %1% variables.", attrs.attrs->size()); } -void NixRepl::addVarToScope(const Symbol & name, Value & v) +void NixRepl::addVarToScope(const Symbol name, Value & v) { if (displ >= envSize) throw Error("environment full; cannot add more variables"); - if (auto oldVar = staticEnv.find(name); oldVar != staticEnv.vars.end()) - staticEnv.vars.erase(oldVar); - staticEnv.vars.emplace_back(name, displ); - staticEnv.sort(); + if (auto oldVar = staticEnv->find(name); oldVar != staticEnv->vars.end()) + staticEnv->vars.erase(oldVar); + staticEnv->vars.emplace_back(name, displ); + staticEnv->sort(); env->values[displ++] = &v; - varNames.insert((std::string) name); + varNames.emplace(state->symbols[name]); } @@ 
-780,8 +933,11 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m str << "«derivation "; Bindings::iterator i = v.attrs->find(state->sDrvPath); PathSet context; - Path drvPath = i != v.attrs->end() ? state->coerceToPath(*i->pos, *i->value, context) : "???"; - str << drvPath << "»"; + if (i != v.attrs->end()) + str << state->store->printStorePath(state->coerceToStorePath(i->pos, *i->value, context)); + else + str << "???"; + str << "»"; } else if (maxDepth > 0) { @@ -790,7 +946,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m typedef std::map<std::string, Value *> Sorted; Sorted sorted; for (auto & i : *v.attrs) - sorted[i.name] = i.value; + sorted.emplace(state->symbols[i.name], i.value); for (auto & i : sorted) { if (isVarName(i.first)) @@ -798,7 +954,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m else printStringValue(str, i.first.c_str()); str << " = "; - if (seen.find(i.second) != seen.end()) + if (seen.count(i.second)) str << "«repeated»"; else try { @@ -840,7 +996,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m case nFunction: if (v.isLambda()) { std::ostringstream s; - s << v.lambda.fun->pos; + s << state->positions[v.lambda.fun->pos]; str << ANSI_BLUE "«lambda @ " << filterANSIEscapes(s.str()) << "»" ANSI_NORMAL; } else if (v.isPrimOp()) { str << ANSI_MAGENTA "«primop»" ANSI_NORMAL; @@ -863,17 +1019,66 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m return str; } -struct CmdRepl : StoreCommand, MixEvalArgs +void runRepl( + ref<EvalState>evalState, + const ValMap & extraEnv) +{ + auto getValues = [&]()->NixRepl::AnnotatedValues{ + NixRepl::AnnotatedValues values; + return values; + }; + const Strings & searchPath = {}; + auto repl = std::make_unique<NixRepl>( + searchPath, + openStore(), + evalState, + getValues + ); + + repl->initEnv(); + + // add 'extra' vars. 
+ for (auto & [name, value] : extraEnv) + repl->addVarToScope(repl->state->symbols.create(name), *value); + + repl->mainLoop(); +} + +struct CmdRepl : InstallablesCommand { + CmdRepl() { + evalSettings.pureEval = false; + } + + void prepare() override + { + if (!settings.isExperimentalFeatureEnabled(Xp::ReplFlake) && !(file) && this->_installables.size() >= 1) { + warn("future versions of Nix will require using `--file` to load a file"); + if (this->_installables.size() > 1) + warn("more than one input file is not currently supported"); + auto filePath = this->_installables[0].data(); + file = std::optional(filePath); + _installables.front() = _installables.back(); + _installables.pop_back(); + } + installables = InstallablesCommand::load(); + } + std::vector<std::string> files; - CmdRepl() + Strings getDefaultFlakeAttrPaths() override { - expectArgs({ - .label = "files", - .handler = {&files}, - .completer = completePath - }); + return {""}; + } + + bool useDefaultInstallables() override + { + return file.has_value() or expr.has_value(); + } + + bool forceImpureByDefault() override + { + return true; } std::string description() override @@ -890,10 +1095,37 @@ struct CmdRepl : StoreCommand, MixEvalArgs void run(ref<Store> store) override { - evalSettings.pureEval = false; - auto repl = std::make_unique<NixRepl>(searchPath, openStore()); + auto state = getEvalState(); + auto getValues = [&]()->NixRepl::AnnotatedValues{ + auto installables = load(); + NixRepl::AnnotatedValues values; + for (auto & installable: installables){ + auto what = installable->what(); + if (file){ + auto [val, pos] = installable->toValue(*state); + auto what = installable->what(); + state->forceValue(*val, pos); + auto autoArgs = getAutoArgs(*state); + auto valPost = state->allocValue(); + state->autoCallFunction(*autoArgs, *val, *valPost); + state->forceValue(*valPost, pos); + values.push_back( {valPost, what }); + } else { + auto [val, pos] = installable->toValue(*state); + values.push_back( {val, what} ); + } + } + return values; + }; + auto repl = std::make_unique<NixRepl>( + searchPath, + openStore(), + state, + getValues + ); repl->autoArgs = getAutoArgs(*repl->state); - repl->mainLoop(files); + repl->initEnv(); + repl->mainLoop(); } }; diff --git a/src/libexpr/attr-path.cc b/src/libexpr/attr-path.cc index eb0e706c7..94ab60f9a 100644 --- a/src/libexpr/attr-path.cc +++ b/src/libexpr/attr-path.cc @@ -41,13 +41,13 @@ std::vector<Symbol> parseAttrPath(EvalState & state, std::string_view s) } -std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const std::string & attrPath, +std::pair<Value *, PosIdx> findAlongAttrPath(EvalState & state, const std::string & attrPath, Bindings & autoArgs, Value & vIn) { Strings tokens = parseAttrPath(attrPath); Value * v = &vIn; - Pos pos = noPos; + PosIdx pos = noPos; for (auto & attr : tokens) { @@ -74,10 +74,16 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const std::string & throw Error("empty attribute name in selection path '%1%'", attrPath); Bindings::iterator a = v->attrs->find(state.symbols.create(attr)); - if (a == v->attrs->end()) - throw AttrPathNotFound("attribute '%1%' in selection path '%2%' not found", attr, attrPath); + if (a == v->attrs->end()) { + std::set<std::string> attrNames; + for (auto & attr : *v->attrs) + attrNames.insert(state.symbols[attr.name]); + + auto suggestions = Suggestions::bestMatches(attrNames, attr); + throw AttrPathNotFound(suggestions, "attribute '%1%' in selection path '%2%' not found", attr, attrPath); + } v = 
&*a->value; - pos = *a->pos; + pos = a->pos; } else { @@ -100,7 +106,7 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const std::string & } -Pos findPackageFilename(EvalState & state, Value & v, std::string what) +std::pair<std::string, uint32_t> findPackageFilename(EvalState & state, Value & v, std::string what) { Value * v2; try { @@ -126,9 +132,7 @@ Pos findPackageFilename(EvalState & state, Value & v, std::string what) throw ParseError("cannot parse line number '%s'", pos); } - Symbol file = state.symbols.create(filename); - - return { foFile, file, lineno, 0 }; + return { std::move(filename), lineno }; } diff --git a/src/libexpr/attr-path.hh b/src/libexpr/attr-path.hh index ff1135a06..117e0051b 100644 --- a/src/libexpr/attr-path.hh +++ b/src/libexpr/attr-path.hh @@ -10,14 +10,14 @@ namespace nix { MakeError(AttrPathNotFound, Error); MakeError(NoPositionInfo, Error); -std::pair<Value *, Pos> findAlongAttrPath( +std::pair<Value *, PosIdx> findAlongAttrPath( EvalState & state, const std::string & attrPath, Bindings & autoArgs, Value & vIn); /* Heuristic to find the filename and lineno or a nix value. */ -Pos findPackageFilename(EvalState & state, Value & v, std::string what); +std::pair<std::string, uint32_t> findPackageFilename(EvalState & state, Value & v, std::string what); std::vector<Symbol> parseAttrPath(EvalState & state, std::string_view s); diff --git a/src/libexpr/attr-set.cc b/src/libexpr/attr-set.cc index 52ac47e9b..877116f1f 100644 --- a/src/libexpr/attr-set.cc +++ b/src/libexpr/attr-set.cc @@ -26,7 +26,7 @@ Bindings * EvalState::allocBindings(size_t capacity) /* Create a new attribute named 'name' on an existing attribute set stored in 'vAttrs' and return the newly allocated Value which is associated with this attribute. */ -Value * EvalState::allocAttr(Value & vAttrs, const Symbol & name) +Value * EvalState::allocAttr(Value & vAttrs, Symbol name) { Value * v = allocValue(); vAttrs.attrs->push_back(Attr(name, v)); @@ -40,7 +40,7 @@ Value * EvalState::allocAttr(Value & vAttrs, std::string_view name) } -Value & BindingsBuilder::alloc(const Symbol & name, ptr<Pos> pos) +Value & BindingsBuilder::alloc(Symbol name, PosIdx pos) { auto value = state.allocValue(); bindings->push_back(Attr(name, value, pos)); @@ -48,7 +48,7 @@ Value & BindingsBuilder::alloc(const Symbol & name, ptr<Pos> pos) } -Value & BindingsBuilder::alloc(std::string_view name, ptr<Pos> pos) +Value & BindingsBuilder::alloc(std::string_view name, PosIdx pos) { return alloc(state.symbols.create(name), pos); } diff --git a/src/libexpr/attr-set.hh b/src/libexpr/attr-set.hh index cad9743ea..dcc73b506 100644 --- a/src/libexpr/attr-set.hh +++ b/src/libexpr/attr-set.hh @@ -15,18 +15,27 @@ struct Value; /* Map one attribute name to its value. */ struct Attr { + /* the placement of `name` and `pos` in this struct is important. + both of them are uint32 wrappers, they are next to each other + to make sure that Attr has no padding on 64 bit machines. that + way we keep Attr size at two words with no wasted space. 
*/ Symbol name; + PosIdx pos; Value * value; - ptr<Pos> pos; - Attr(Symbol name, Value * value, ptr<Pos> pos = ptr(&noPos)) - : name(name), value(value), pos(pos) { }; - Attr() : pos(&noPos) { }; + Attr(Symbol name, Value * value, PosIdx pos = noPos) + : name(name), pos(pos), value(value) { }; + Attr() { }; bool operator < (const Attr & a) const { return name < a.name; } }; +static_assert(sizeof(Attr) == 2 * sizeof(uint32_t) + sizeof(Value *), + "performance of the evaluator is highly sensitive to the size of Attr. " + "avoid introducing any padding into Attr if at all possible, and do not " + "introduce new fields that need not be present for almost every instance."); + /* Bindings contains all the attributes of an attribute set. It is defined by its size and its capacity, the capacity being the number of Attr elements allocated after this structure, while the size corresponds to @@ -35,13 +44,13 @@ class Bindings { public: typedef uint32_t size_t; - ptr<Pos> pos; + PosIdx pos; private: size_t size_, capacity_; Attr attrs[0]; - Bindings(size_t capacity) : pos(&noPos), size_(0), capacity_(capacity) { } + Bindings(size_t capacity) : size_(0), capacity_(capacity) { } Bindings(const Bindings & bindings) = delete; public: @@ -57,7 +66,7 @@ public: attrs[size_++] = attr; } - iterator find(const Symbol & name) + iterator find(Symbol name) { Attr key(name, 0); iterator i = std::lower_bound(begin(), end(), key); @@ -65,7 +74,7 @@ public: return end(); } - Attr * get(const Symbol & name) + Attr * get(Symbol name) { Attr key(name, 0); iterator i = std::lower_bound(begin(), end(), key); @@ -73,18 +82,6 @@ public: return nullptr; } - Attr & need(const Symbol & name, const Pos & pos = noPos) - { - auto a = get(name); - if (!a) - throw Error({ - .msg = hintfmt("attribute '%s' missing", name), - .errPos = pos - }); - - return *a; - } - iterator begin() { return &attrs[0]; } iterator end() { return &attrs[size_]; } @@ -98,14 +95,15 @@ public: size_t capacity() { return capacity_; } /* Returns the attributes in lexicographically sorted order. 
*/ - std::vector<const Attr *> lexicographicOrder() const + std::vector<const Attr *> lexicographicOrder(const SymbolTable & symbols) const { std::vector<const Attr *> res; res.reserve(size_); for (size_t n = 0; n < size_; n++) res.emplace_back(&attrs[n]); - std::sort(res.begin(), res.end(), [](const Attr * a, const Attr * b) { - return (const std::string &) a->name < (const std::string &) b->name; + std::sort(res.begin(), res.end(), [&](const Attr * a, const Attr * b) { + std::string_view sa = symbols[a->name], sb = symbols[b->name]; + return sa < sb; }); return res; } @@ -130,7 +128,7 @@ public: : bindings(bindings), state(state) { } - void insert(Symbol name, Value * value, ptr<Pos> pos = ptr(&noPos)) + void insert(Symbol name, Value * value, PosIdx pos = noPos) { insert(Attr(name, value, pos)); } @@ -145,9 +143,9 @@ public: bindings->push_back(attr); } - Value & alloc(const Symbol & name, ptr<Pos> pos = ptr(&noPos)); + Value & alloc(Symbol name, PosIdx pos = noPos); - Value & alloc(std::string_view name, ptr<Pos> pos = ptr(&noPos)); + Value & alloc(std::string_view name, PosIdx pos = noPos); Bindings * finish() { diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index 00d0749f9..b259eec63 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -21,6 +21,8 @@ struct AttrDb { std::atomic_bool failed{false}; + const Store & cfg; + struct State { SQLite db; @@ -33,12 +35,19 @@ struct AttrDb std::unique_ptr<Sync<State>> _state; - AttrDb(const Hash & fingerprint) - : _state(std::make_unique<Sync<State>>()) + SymbolTable & symbols; + + AttrDb( + const Store & cfg, + const Hash & fingerprint, + SymbolTable & symbols) + : cfg(cfg) + , _state(std::make_unique<Sync<State>>()) + , symbols(symbols) { auto state(_state->lock()); - Path cacheDir = getCacheDir() + "/nix/eval-cache-v2"; + Path cacheDir = getCacheDir() + "/nix/eval-cache-v4"; createDirs(cacheDir); Path dbPath = cacheDir + "/" + fingerprint.to_string(Base16, false) + ".sqlite"; @@ -97,7 +106,7 @@ struct AttrDb state->insertAttribute.use() (key.first) - (key.second) + (symbols[key.second]) (AttrType::FullAttrs) (0, false).exec(); @@ -107,7 +116,7 @@ struct AttrDb for (auto & attr : attrs) state->insertAttribute.use() (rowId) - (attr) + (symbols[attr]) (AttrType::Placeholder) (0, false).exec(); @@ -132,14 +141,14 @@ struct AttrDb } state->insertAttributeWithContext.use() (key.first) - (key.second) + (symbols[key.second]) (AttrType::String) (s) (ctx).exec(); } else { state->insertAttribute.use() (key.first) - (key.second) + (symbols[key.second]) (AttrType::String) (s).exec(); } @@ -158,7 +167,7 @@ struct AttrDb state->insertAttribute.use() (key.first) - (key.second) + (symbols[key.second]) (AttrType::Bool) (b ? 
1 : 0).exec(); @@ -166,6 +175,42 @@ struct AttrDb }); } + AttrId setInt( + AttrKey key, + int n) + { + return doSQLite([&]() + { + auto state(_state->lock()); + + state->insertAttribute.use() + (key.first) + (symbols[key.second]) + (AttrType::Int) + (n).exec(); + + return state->db.getLastInsertedRowId(); + }); + } + + AttrId setListOfStrings( + AttrKey key, + const std::vector<std::string> & l) + { + return doSQLite([&]() + { + auto state(_state->lock()); + + state->insertAttribute.use() + (key.first) + (symbols[key.second]) + (AttrType::ListOfStrings) + (concatStringsSep("\t", l)).exec(); + + return state->db.getLastInsertedRowId(); + }); + } + AttrId setPlaceholder(AttrKey key) { return doSQLite([&]() @@ -174,7 +219,7 @@ struct AttrDb state->insertAttribute.use() (key.first) - (key.second) + (symbols[key.second]) (AttrType::Placeholder) (0, false).exec(); @@ -190,7 +235,7 @@ struct AttrDb state->insertAttribute.use() (key.first) - (key.second) + (symbols[key.second]) (AttrType::Missing) (0, false).exec(); @@ -206,7 +251,7 @@ struct AttrDb state->insertAttribute.use() (key.first) - (key.second) + (symbols[key.second]) (AttrType::Misc) (0, false).exec(); @@ -222,7 +267,7 @@ struct AttrDb state->insertAttribute.use() (key.first) - (key.second) + (symbols[key.second]) (AttrType::Failed) (0, false).exec(); @@ -230,16 +275,14 @@ struct AttrDb }); } - std::optional<std::pair<AttrId, AttrValue>> getAttr( - AttrKey key, - SymbolTable & symbols) + std::optional<std::pair<AttrId, AttrValue>> getAttr(AttrKey key) { auto state(_state->lock()); - auto queryAttribute(state->queryAttribute.use()(key.first)(key.second)); + auto queryAttribute(state->queryAttribute.use()(key.first)(symbols[key.second])); if (!queryAttribute.next()) return {}; - auto rowId = (AttrType) queryAttribute.getInt(0); + auto rowId = (AttrId) queryAttribute.getInt(0); auto type = (AttrType) queryAttribute.getInt(1); switch (type) { @@ -250,18 +293,22 @@ struct AttrDb std::vector<Symbol> attrs; auto queryAttributes(state->queryAttributes.use()(rowId)); while (queryAttributes.next()) - attrs.push_back(symbols.create(queryAttributes.getStr(0))); + attrs.emplace_back(symbols.create(queryAttributes.getStr(0))); return {{rowId, attrs}}; } case AttrType::String: { - std::vector<std::pair<Path, std::string>> context; + NixStringContext context; if (!queryAttribute.isNull(3)) for (auto & s : tokenizeString<std::vector<std::string>>(queryAttribute.getStr(3), ";")) - context.push_back(decodeContext(s)); + context.push_back(decodeContext(cfg, s)); return {{rowId, string_t{queryAttribute.getStr(2), context}}}; } case AttrType::Bool: return {{rowId, queryAttribute.getInt(2) != 0}}; + case AttrType::Int: + return {{rowId, int_t{queryAttribute.getInt(2)}}}; + case AttrType::ListOfStrings: + return {{rowId, tokenizeString<std::vector<std::string>>(queryAttribute.getStr(2), "\t")}}; case AttrType::Missing: return {{rowId, missing_t()}}; case AttrType::Misc: @@ -274,10 +321,13 @@ struct AttrDb } }; -static std::shared_ptr<AttrDb> makeAttrDb(const Hash & fingerprint) +static std::shared_ptr<AttrDb> makeAttrDb( + const Store & cfg, + const Hash & fingerprint, + SymbolTable & symbols) { try { - return std::make_shared<AttrDb>(fingerprint); + return std::make_shared<AttrDb>(cfg, fingerprint, symbols); } catch (SQLiteError &) { ignoreException(); return nullptr; @@ -288,7 +338,7 @@ EvalCache::EvalCache( std::optional<std::reference_wrapper<const Hash>> useCache, EvalState & state, RootLoader rootLoader) - : db(useCache ? 
makeAttrDb(*useCache) : nullptr) + : db(useCache ? makeAttrDb(*state.store, *useCache, state.symbols) : nullptr) , state(state) , rootLoader(rootLoader) { @@ -303,9 +353,9 @@ Value * EvalCache::getRootValue() return *value; } -std::shared_ptr<AttrCursor> EvalCache::getRoot() +ref<AttrCursor> EvalCache::getRoot() { - return std::make_shared<AttrCursor>(ref(shared_from_this()), std::nullopt); + return make_ref<AttrCursor>(ref(shared_from_this()), std::nullopt); } AttrCursor::AttrCursor( @@ -324,8 +374,7 @@ AttrKey AttrCursor::getKey() if (!parent) return {0, root->state.sEpsilon}; if (!parent->first->cachedValue) { - parent->first->cachedValue = root->db->getAttr( - parent->first->getKey(), root->state.symbols); + parent->first->cachedValue = root->db->getAttr(parent->first->getKey()); assert(parent->first->cachedValue); } return {parent->first->cachedValue->first, parent->second}; @@ -366,17 +415,17 @@ std::vector<Symbol> AttrCursor::getAttrPath(Symbol name) const std::string AttrCursor::getAttrPathStr() const { - return concatStringsSep(".", getAttrPath()); + return concatStringsSep(".", root->state.symbols.resolve(getAttrPath())); } std::string AttrCursor::getAttrPathStr(Symbol name) const { - return concatStringsSep(".", getAttrPath(name)); + return concatStringsSep(".", root->state.symbols.resolve(getAttrPath(name))); } Value & AttrCursor::forceValue() { - debug("evaluating uncached attribute %s", getAttrPathStr()); + debug("evaluating uncached attribute '%s'", getAttrPathStr()); auto & v = getValue(); @@ -397,6 +446,8 @@ Value & AttrCursor::forceValue() cachedValue = {root->db->setString(getKey(), v.path), string_t{v.path, {}}}; else if (v.type() == nBool) cachedValue = {root->db->setBool(getKey(), v.boolean), v.boolean}; + else if (v.type() == nInt) + cachedValue = {root->db->setInt(getKey(), v.integer), int_t{v.integer}}; else if (v.type() == nAttrs) ; // FIXME: do something? 
else @@ -406,26 +457,36 @@ Value & AttrCursor::forceValue() return v; } +Suggestions AttrCursor::getSuggestionsForAttr(Symbol name) +{ + auto attrNames = getAttrs(); + std::set<std::string> strAttrNames; + for (auto & name : attrNames) + strAttrNames.insert(root->state.symbols[name]); + + return Suggestions::bestMatches(strAttrNames, root->state.symbols[name]); +} + std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErrors) { if (root->db) { if (!cachedValue) - cachedValue = root->db->getAttr(getKey(), root->state.symbols); + cachedValue = root->db->getAttr(getKey()); if (cachedValue) { if (auto attrs = std::get_if<std::vector<Symbol>>(&cachedValue->second)) { for (auto & attr : *attrs) if (attr == name) - return std::make_shared<AttrCursor>(root, std::make_pair(shared_from_this(), name)); + return std::make_shared<AttrCursor>(root, std::make_pair(shared_from_this(), attr)); return nullptr; } else if (std::get_if<placeholder_t>(&cachedValue->second)) { - auto attr = root->db->getAttr({cachedValue->first, name}, root->state.symbols); + auto attr = root->db->getAttr({cachedValue->first, name}); if (attr) { if (std::get_if<missing_t>(&attr->second)) return nullptr; else if (std::get_if<failed_t>(&attr->second)) { if (forceErrors) - debug("reevaluating failed cached attribute '%s'"); + debug("reevaluating failed cached attribute '%s'", getAttrPathStr(name)); else throw CachedEvalError("cached failure of attribute '%s'", getAttrPathStr(name)); } else @@ -464,7 +525,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro cachedValue2 = {root->db->setPlaceholder({cachedValue->first, name}), placeholder_t()}; } - return std::make_shared<AttrCursor>( + return make_ref<AttrCursor>( root, std::make_pair(shared_from_this(), name), attr->value, std::move(cachedValue2)); } @@ -473,47 +534,51 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(std::string_view name) return maybeGetAttr(root->state.symbols.create(name)); } -std::shared_ptr<AttrCursor> AttrCursor::getAttr(Symbol name, bool forceErrors) +ref<AttrCursor> AttrCursor::getAttr(Symbol name, bool forceErrors) { auto p = maybeGetAttr(name, forceErrors); if (!p) throw Error("attribute '%s' does not exist", getAttrPathStr(name)); - return p; + return ref(p); } -std::shared_ptr<AttrCursor> AttrCursor::getAttr(std::string_view name) +ref<AttrCursor> AttrCursor::getAttr(std::string_view name) { return getAttr(root->state.symbols.create(name)); } -std::shared_ptr<AttrCursor> AttrCursor::findAlongAttrPath(const std::vector<Symbol> & attrPath, bool force) +OrSuggestions<ref<AttrCursor>> AttrCursor::findAlongAttrPath(const std::vector<Symbol> & attrPath, bool force) { auto res = shared_from_this(); for (auto & attr : attrPath) { - res = res->maybeGetAttr(attr, force); - if (!res) return {}; + auto child = res->maybeGetAttr(attr, force); + if (!child) { + auto suggestions = res->getSuggestionsForAttr(attr); + return OrSuggestions<ref<AttrCursor>>::failed(suggestions); + } + res = child; } - return res; + return ref(res); } std::string AttrCursor::getString() { if (root->db) { if (!cachedValue) - cachedValue = root->db->getAttr(getKey(), root->state.symbols); + cachedValue = root->db->getAttr(getKey()); if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) { if (auto s = std::get_if<string_t>(&cachedValue->second)) { debug("using cached string attribute '%s'", getAttrPathStr()); return s->first; } else - throw TypeError("'%s' is not a string", getAttrPathStr()); + 
root->state.debugThrowLastTrace(TypeError("'%s' is not a string", getAttrPathStr())); } } auto & v = forceValue(); if (v.type() != nString && v.type() != nPath) - throw TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type())); + root->state.debugThrowLastTrace(TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type()))); return v.type() == nString ? v.string.s : v.path; } @@ -522,12 +587,12 @@ string_t AttrCursor::getStringWithContext() { if (root->db) { if (!cachedValue) - cachedValue = root->db->getAttr(getKey(), root->state.symbols); + cachedValue = root->db->getAttr(getKey()); if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) { if (auto s = std::get_if<string_t>(&cachedValue->second)) { bool valid = true; for (auto & c : s->second) { - if (!root->state.store->isValidPath(root->state.store->parseStorePath(c.first))) { + if (!root->state.store->isValidPath(c.first)) { valid = false; break; } @@ -537,66 +602,122 @@ string_t AttrCursor::getStringWithContext() return *s; } } else - throw TypeError("'%s' is not a string", getAttrPathStr()); + root->state.debugThrowLastTrace(TypeError("'%s' is not a string", getAttrPathStr())); } } auto & v = forceValue(); if (v.type() == nString) - return {v.string.s, v.getContext()}; + return {v.string.s, v.getContext(*root->state.store)}; else if (v.type() == nPath) return {v.path, {}}; else - throw TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type())); + root->state.debugThrowLastTrace(TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type()))); } bool AttrCursor::getBool() { if (root->db) { if (!cachedValue) - cachedValue = root->db->getAttr(getKey(), root->state.symbols); + cachedValue = root->db->getAttr(getKey()); if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) { if (auto b = std::get_if<bool>(&cachedValue->second)) { debug("using cached Boolean attribute '%s'", getAttrPathStr()); return *b; } else - throw TypeError("'%s' is not a Boolean", getAttrPathStr()); + root->state.debugThrowLastTrace(TypeError("'%s' is not a Boolean", getAttrPathStr())); } } auto & v = forceValue(); if (v.type() != nBool) - throw TypeError("'%s' is not a Boolean", getAttrPathStr()); + root->state.debugThrowLastTrace(TypeError("'%s' is not a Boolean", getAttrPathStr())); return v.boolean; } +NixInt AttrCursor::getInt() +{ + if (root->db) { + if (!cachedValue) + cachedValue = root->db->getAttr(getKey()); + if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) { + if (auto i = std::get_if<int_t>(&cachedValue->second)) { + debug("using cached Integer attribute '%s'", getAttrPathStr()); + return i->x; + } else + throw TypeError("'%s' is not an Integer", getAttrPathStr()); + } + } + + auto & v = forceValue(); + + if (v.type() != nInt) + throw TypeError("'%s' is not an Integer", getAttrPathStr()); + + return v.integer; +} + +std::vector<std::string> AttrCursor::getListOfStrings() +{ + if (root->db) { + if (!cachedValue) + cachedValue = root->db->getAttr(getKey()); + if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) { + if (auto l = std::get_if<std::vector<std::string>>(&cachedValue->second)) { + debug("using cached list of strings attribute '%s'", getAttrPathStr()); + return *l; + } else + throw TypeError("'%s' is not a list of strings", getAttrPathStr()); + } + } + + debug("evaluating uncached attribute '%s'", getAttrPathStr()); + + auto & v = getValue(); + root->state.forceValue(v, noPos); + + if (v.type() != nList) + 
throw TypeError("'%s' is not a list", getAttrPathStr()); + + std::vector<std::string> res; + + for (auto & elem : v.listItems()) + res.push_back(std::string(root->state.forceStringNoCtx(*elem))); + + if (root->db) + cachedValue = {root->db->setListOfStrings(getKey(), res), res}; + + return res; +} + std::vector<Symbol> AttrCursor::getAttrs() { if (root->db) { if (!cachedValue) - cachedValue = root->db->getAttr(getKey(), root->state.symbols); + cachedValue = root->db->getAttr(getKey()); if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) { if (auto attrs = std::get_if<std::vector<Symbol>>(&cachedValue->second)) { debug("using cached attrset attribute '%s'", getAttrPathStr()); return *attrs; } else - throw TypeError("'%s' is not an attribute set", getAttrPathStr()); + root->state.debugThrowLastTrace(TypeError("'%s' is not an attribute set", getAttrPathStr())); } } auto & v = forceValue(); if (v.type() != nAttrs) - throw TypeError("'%s' is not an attribute set", getAttrPathStr()); + root->state.debugThrowLastTrace(TypeError("'%s' is not an attribute set", getAttrPathStr())); std::vector<Symbol> attrs; for (auto & attr : *getValue().attrs) attrs.push_back(attr.name); - std::sort(attrs.begin(), attrs.end(), [](const Symbol & a, const Symbol & b) { - return (const std::string &) a < (const std::string &) b; + std::sort(attrs.begin(), attrs.end(), [&](Symbol a, Symbol b) { + std::string_view sa = root->state.symbols[a], sb = root->state.symbols[b]; + return sa < sb; }); if (root->db) diff --git a/src/libexpr/eval-cache.hh b/src/libexpr/eval-cache.hh index 43b34ebcb..c93e55b93 100644 --- a/src/libexpr/eval-cache.hh +++ b/src/libexpr/eval-cache.hh @@ -33,7 +33,7 @@ public: EvalState & state, RootLoader rootLoader); - std::shared_ptr<AttrCursor> getRoot(); + ref<AttrCursor> getRoot(); }; enum AttrType { @@ -44,15 +44,18 @@ enum AttrType { Misc = 4, Failed = 5, Bool = 6, + ListOfStrings = 7, + Int = 8, }; struct placeholder_t {}; struct missing_t {}; struct misc_t {}; struct failed_t {}; +struct int_t { NixInt x; }; typedef uint64_t AttrId; typedef std::pair<AttrId, Symbol> AttrKey; -typedef std::pair<std::string, std::vector<std::pair<Path, std::string>>> string_t; +typedef std::pair<std::string, NixStringContext> string_t; typedef std::variant< std::vector<Symbol>, @@ -61,7 +64,9 @@ typedef std::variant< missing_t, misc_t, failed_t, - bool + bool, + int_t, + std::vector<std::string> > AttrValue; class AttrCursor : public std::enable_shared_from_this<AttrCursor> @@ -94,15 +99,19 @@ public: std::string getAttrPathStr(Symbol name) const; + Suggestions getSuggestionsForAttr(Symbol name); + std::shared_ptr<AttrCursor> maybeGetAttr(Symbol name, bool forceErrors = false); std::shared_ptr<AttrCursor> maybeGetAttr(std::string_view name); - std::shared_ptr<AttrCursor> getAttr(Symbol name, bool forceErrors = false); + ref<AttrCursor> getAttr(Symbol name, bool forceErrors = false); - std::shared_ptr<AttrCursor> getAttr(std::string_view name); + ref<AttrCursor> getAttr(std::string_view name); - std::shared_ptr<AttrCursor> findAlongAttrPath(const std::vector<Symbol> & attrPath, bool force = false); + /* Get an attribute along a chain of attrsets. Note that this does + not auto-call functors or functions. 
*/ + OrSuggestions<ref<AttrCursor>> findAlongAttrPath(const std::vector<Symbol> & attrPath, bool force = false); std::string getString(); @@ -110,6 +119,10 @@ public: bool getBool(); + NixInt getInt(); + + std::vector<std::string> getListOfStrings(); + std::vector<Symbol> getAttrs(); bool isDerivation(); diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index aef1f6351..f2f4ba725 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -2,29 +2,84 @@ #include "eval.hh" -#define LocalNoInline(f) static f __attribute__((noinline)); f -#define LocalNoInlineNoReturn(f) static f __attribute__((noinline, noreturn)); f - namespace nix { -LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s)) +/* Note: Various places expect the allocated memory to be zeroed. */ +[[gnu::always_inline]] +inline void * allocBytes(size_t n) +{ + void * p; +#if HAVE_BOEHMGC + p = GC_MALLOC(n); +#else + p = calloc(n, 1); +#endif + if (!p) throw std::bad_alloc(); + return p; +} + + +[[gnu::always_inline]] +Value * EvalState::allocValue() { - throw EvalError({ - .msg = hintfmt(s), - .errPos = pos - }); +#if HAVE_BOEHMGC + /* We use the boehm batch allocator to speed up allocations of Values (of which there are many). + GC_malloc_many returns a linked list of objects of the given size, where the first word + of each object is also the pointer to the next object in the list. This also means that we + have to explicitly clear the first word of every object we take. */ + if (!*valueAllocCache) { + *valueAllocCache = GC_malloc_many(sizeof(Value)); + if (!*valueAllocCache) throw std::bad_alloc(); + } + + /* GC_NEXT is a convenience macro for accessing the first word of an object. + Take the first list item, advance the list to the next item, and clear the next pointer. */ + void * p = *valueAllocCache; + *valueAllocCache = GC_NEXT(p); + GC_NEXT(p) = nullptr; +#else + void * p = allocBytes(sizeof(Value)); +#endif + + nrValues++; + return (Value *) p; } -LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const Value & v)) + +[[gnu::always_inline]] +Env & EvalState::allocEnv(size_t size) { - throw TypeError({ - .msg = hintfmt(s, showType(v)), - .errPos = pos - }); + nrEnvs++; + nrValuesInEnvs += size; + + Env * env; + +#if HAVE_BOEHMGC + if (size == 1) { + /* see allocValue for explanations. */ + if (!*env1AllocCache) { + *env1AllocCache = GC_malloc_many(sizeof(Env) + sizeof(Value *)); + if (!*env1AllocCache) throw std::bad_alloc(); + } + + void * p = *env1AllocCache; + *env1AllocCache = GC_NEXT(p); + GC_NEXT(p) = nullptr; + env = (Env *) p; + } else +#endif + env = (Env *) allocBytes(sizeof(Env) + size * sizeof(Value *)); + + env->type = Env::Plain; + + /* We assume that env->values has been cleared by the allocator; maybeThunk() and lookupVar fromWith expect this. 
*/ + + return *env; } -void EvalState::forceValue(Value & v, const Pos & pos) +[[gnu::always_inline]] +void EvalState::forceValue(Value & v, const PosIdx pos) { forceValue(v, [&]() { return pos; }); } @@ -52,13 +107,15 @@ void EvalState::forceValue(Value & v, Callable getPos) } -inline void EvalState::forceAttrs(Value & v, const Pos & pos) +[[gnu::always_inline]] +inline void EvalState::forceAttrs(Value & v, const PosIdx pos) { forceAttrs(v, [&]() { return pos; }); } template <typename Callable> +[[gnu::always_inline]] inline void EvalState::forceAttrs(Value & v, Callable getPos) { forceValue(v, getPos); @@ -67,25 +124,13 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos) } -inline void EvalState::forceList(Value & v, const Pos & pos) +[[gnu::always_inline]] +inline void EvalState::forceList(Value & v, const PosIdx pos) { forceValue(v, pos); if (!v.isList()) throwTypeError(pos, "value is %1% while a list was expected", v); } -/* Note: Various places expect the allocated memory to be zeroed. */ -inline void * allocBytes(size_t n) -{ - void * p; -#if HAVE_BOEHMGC - p = GC_MALLOC(n); -#else - p = calloc(n, 1); -#endif - if (!p) throw std::bad_alloc(); - return p; -} - } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 60f0bf08c..e3716f217 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -18,6 +18,7 @@ #include <sys/resource.h> #include <iostream> #include <fstream> +#include <functional> #include <sys/resource.h> @@ -36,7 +37,6 @@ namespace nix { - static char * allocString(size_t size) { char * t; @@ -63,9 +63,15 @@ static char * dupString(const char * s) } -static char * dupStringWithLen(const char * s, size_t size) +// When there's no need to write to the string, we can optimize away empty +// string allocations. +// This function handles makeImmutableStringWithLen(null, 0) by returning the +// empty string. +static const char * makeImmutableStringWithLen(const char * s, size_t size) { char * t; + if (size == 0) + return ""; #if HAVE_BOEHMGC t = GC_STRNDUP(s, size); #else @@ -75,6 +81,10 @@ static char * dupStringWithLen(const char * s, size_t size) return t; } +static inline const char * makeImmutableString(std::string_view s) { + return makeImmutableStringWithLen(s.data(), s.size()); +} + RootValue allocRootValue(Value * v) { @@ -86,25 +96,21 @@ RootValue allocRootValue(Value * v) } -void printValue(std::ostream & str, std::set<const Value *> & active, const Value & v) +void Value::print(const SymbolTable & symbols, std::ostream & str, + std::set<const void *> * seen) const { checkInterrupt(); - if (!active.insert(&v).second) { - str << "<CYCLE>"; - return; - } - - switch (v.internalType) { + switch (internalType) { case tInt: - str << v.integer; + str << integer; break; case tBool: - str << (v.boolean ? "true" : "false"); + str << (boolean ? "true" : "false"); break; case tString: str << "\""; - for (const char * i = v.string.s; *i; i++) + for (const char * i = string.s; *i; i++) if (*i == '\"' || *i == '\\') str << "\\" << *i; else if (*i == '\n') str << "\\n"; else if (*i == '\r') str << "\\r"; @@ -114,30 +120,41 @@ void printValue(std::ostream & str, std::set<const Value *> & active, const Valu str << "\""; break; case tPath: - str << v.path; // !!! escaping? + str << path; // !!! escaping? 
break; case tNull: str << "null"; break; case tAttrs: { - str << "{ "; - for (auto & i : v.attrs->lexicographicOrder()) { - str << i->name << " = "; - printValue(str, active, *i->value); - str << "; "; + if (seen && !attrs->empty() && !seen->insert(attrs).second) + str << "«repeated»"; + else { + str << "{ "; + for (auto & i : attrs->lexicographicOrder(symbols)) { + str << symbols[i->name] << " = "; + i->value->print(symbols, str, seen); + str << "; "; + } + str << "}"; } - str << "}"; break; } case tList1: case tList2: case tListN: - str << "[ "; - for (auto v2 : v.listItems()) { - printValue(str, active, *v2); - str << " "; + if (seen && listSize() && !seen->insert(listElems()).second) + str << "«repeated»"; + else { + str << "[ "; + for (auto v2 : listItems()) { + if (v2) + v2->print(symbols, str, seen); + else + str << "(nullptr)"; + str << " "; + } + str << "]"; } - str << "]"; break; case tThunk: case tApp: @@ -153,24 +170,34 @@ void printValue(std::ostream & str, std::set<const Value *> & active, const Valu str << "<PRIMOP-APP>"; break; case tExternal: - str << *v.external; + str << *external; break; case tFloat: - str << v.fpoint; + str << fpoint; break; default: abort(); } +} + - active.erase(&v); +void Value::print(const SymbolTable & symbols, std::ostream & str, bool showRepeated) const +{ + std::set<const void *> seen; + print(symbols, str, showRepeated ? nullptr : &seen); } +// Pretty print types for assertion errors +std::ostream & operator << (std::ostream & os, const ValueType t) { + os << showType(t); + return os; +} -std::ostream & operator << (std::ostream & str, const Value & v) +std::string printValue(const EvalState & state, const Value & v) { - std::set<const Value *> active; - printValue(str, active, v); - return str; + std::ostringstream out; + v.print(state.symbols, out); + return out.str(); } @@ -219,10 +246,10 @@ std::string showType(const Value & v) } } -Pos Value::determinePos(const Pos & pos) const +PosIdx Value::determinePos(const PosIdx pos) const { switch (internalType) { - case tAttrs: return *attrs->pos; + case tAttrs: return attrs->pos; case tLambda: return lambda.fun->pos; case tApp: return app.left->determinePos(pos); default: return pos; @@ -291,7 +318,7 @@ static BoehmGCStackAllocator boehmGCStackAllocator; static Symbol getName(const AttrName & name, EvalState & state, Env & env) { - if (name.symbol.set()) { + if (name.symbol) { return name.symbol; } else { Value nameValue; @@ -419,6 +446,7 @@ EvalState::EvalState( , sBuilder(symbols.create("builder")) , sArgs(symbols.create("args")) , sContentAddressed(symbols.create("__contentAddressed")) + , sImpure(symbols.create("__impure")) , sOutputHash(symbols.create("outputHash")) , sOutputHashAlgo(symbols.create("outputHashAlgo")) , sOutputHashMode(symbols.create("outputHashMode")) @@ -431,18 +459,25 @@ EvalState::EvalState( , sKey(symbols.create("key")) , sPath(symbols.create("path")) , sPrefix(symbols.create("prefix")) + , sOutputSpecified(symbols.create("outputSpecified")) , repair(NoRepair) , emptyBindings(0) , store(store) , buildStore(buildStore ? 
buildStore : store) + , debugRepl(nullptr) + , debugStop(false) + , debugQuit(false) + , trylevel(0) , regexCache(makeRegexCache()) #if HAVE_BOEHMGC , valueAllocCache(std::allocate_shared<void *>(traceable_allocator<void *>(), nullptr)) + , env1AllocCache(std::allocate_shared<void *>(traceable_allocator<void *>(), nullptr)) #else , valueAllocCache(std::make_shared<void *>(nullptr)) + , env1AllocCache(std::make_shared<void *>(nullptr)) #endif , baseEnv(allocEnv(128)) - , staticBaseEnv(false, 0) + , staticBaseEnv{std::make_shared<StaticEnv>(false, nullptr)} { countCalls = getEnv("NIX_COUNT_CALLS").value_or("0") != "0"; @@ -488,23 +523,6 @@ EvalState::~EvalState() } -void EvalState::requireExperimentalFeatureOnEvaluation( - const ExperimentalFeature & feature, - const std::string_view fName, - const Pos & pos) -{ - if (!settings.isExperimentalFeatureEnabled(feature)) { - throw EvalError({ - .msg = hintfmt( - "Cannot call '%2%' because experimental Nix feature '%1%' is disabled. You can enable it via '--extra-experimental-features %1%'.", - feature, - fName - ), - .errPos = pos - }); - } -} - void EvalState::allowPath(const Path & path) { if (allowedPaths) @@ -517,6 +535,14 @@ void EvalState::allowPath(const StorePath & storePath) allowedPaths->insert(store->toRealPath(storePath)); } +void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & v) +{ + allowPath(storePath); + + auto path = store->printStorePath(storePath); + v.mkString(path, PathSet({path})); +} + Path EvalState::checkSourcePath(const Path & path_) { if (!allowedPaths) return path_; @@ -617,7 +643,7 @@ Value * EvalState::addConstant(const std::string & name, Value & v) void EvalState::addConstant(const std::string & name, Value * v) { - staticBaseEnv.vars.emplace_back(symbols.create(name), baseEnvDispl); + staticBaseEnv->vars.emplace_back(symbols.create(name), baseEnvDispl); baseEnv.values[baseEnvDispl++] = v; auto name2 = name.substr(0, 2) == "__" ? name.substr(2) : name; baseEnv.values[0]->attrs->push_back(Attr(symbols.create(name2), v)); @@ -628,21 +654,21 @@ Value * EvalState::addPrimOp(const std::string & name, size_t arity, PrimOpFun primOp) { auto name2 = name.substr(0, 2) == "__" ? name.substr(2) : name; - Symbol sym = symbols.create(name2); + auto sym = symbols.create(name2); /* Hack to make constants lazy: turn them into a application of the primop to a dummy value. 
*/ if (arity == 0) { auto vPrimOp = allocValue(); - vPrimOp->mkPrimOp(new PrimOp { .fun = primOp, .arity = 1, .name = sym }); + vPrimOp->mkPrimOp(new PrimOp { .fun = primOp, .arity = 1, .name = name2 }); Value v; v.mkApp(vPrimOp, vPrimOp); return addConstant(name, v); } Value * v = allocValue(); - v->mkPrimOp(new PrimOp { .fun = primOp, .arity = arity, .name = sym }); - staticBaseEnv.vars.emplace_back(symbols.create(name), baseEnvDispl); + v->mkPrimOp(new PrimOp { .fun = primOp, .arity = arity, .name = name2 }); + staticBaseEnv->vars.emplace_back(symbols.create(name), baseEnvDispl); baseEnv.values[baseEnvDispl++] = v; baseEnv.values[0]->attrs->push_back(Attr(sym, v)); return v; @@ -656,21 +682,21 @@ Value * EvalState::addPrimOp(PrimOp && primOp) if (primOp.arity == 0) { primOp.arity = 1; auto vPrimOp = allocValue(); - vPrimOp->mkPrimOp(new PrimOp(std::move(primOp))); + vPrimOp->mkPrimOp(new PrimOp(primOp)); Value v; v.mkApp(vPrimOp, vPrimOp); return addConstant(primOp.name, v); } - Symbol envName = primOp.name; + auto envName = symbols.create(primOp.name); if (hasPrefix(primOp.name, "__")) - primOp.name = symbols.create(std::string(primOp.name, 2)); + primOp.name = primOp.name.substr(2); Value * v = allocValue(); - v->mkPrimOp(new PrimOp(std::move(primOp))); - staticBaseEnv.vars.emplace_back(envName, baseEnvDispl); + v->mkPrimOp(new PrimOp(primOp)); + staticBaseEnv->vars.emplace_back(envName, baseEnvDispl); baseEnv.values[baseEnvDispl++] = v; - baseEnv.values[0]->attrs->push_back(Attr(primOp.name, v)); + baseEnv.values[0]->attrs->push_back(Attr(symbols.create(primOp.name), v)); return v; } @@ -687,7 +713,7 @@ std::optional<EvalState::Doc> EvalState::getDoc(Value & v) auto v2 = &v; if (v2->primOp->doc) return Doc { - .pos = noPos, + .pos = {}, .name = v2->primOp->name, .arity = v2->primOp->arity, .args = v2->primOp->args, @@ -698,105 +724,333 @@ std::optional<EvalState::Doc> EvalState::getDoc(Value & v) } +// just for the current level of StaticEnv, not the whole chain. +void printStaticEnvBindings(const SymbolTable & st, const StaticEnv & se) +{ + std::cout << ANSI_MAGENTA; + for (auto & i : se.vars) + std::cout << st[i.first] << " "; + std::cout << ANSI_NORMAL; + std::cout << std::endl; +} + +// just for the current level of Env, not the whole chain. +void printWithBindings(const SymbolTable & st, const Env & env) +{ + if (env.type == Env::HasWithAttrs) { + std::cout << "with: "; + std::cout << ANSI_MAGENTA; + Bindings::iterator j = env.values[0]->attrs->begin(); + while (j != env.values[0]->attrs->end()) { + std::cout << st[j->name] << " "; + ++j; + } + std::cout << ANSI_NORMAL; + std::cout << std::endl; + } +} + +void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & env, int lvl) +{ + std::cout << "Env level " << lvl << std::endl; + + if (se.up && env.up) { + std::cout << "static: "; + printStaticEnvBindings(st, se); + printWithBindings(st, env); + std::cout << std::endl; + printEnvBindings(st, *se.up, *env.up, ++lvl); + } else { + std::cout << ANSI_MAGENTA; + // for the top level, don't print the double underscore ones; + // they are in builtins. + for (auto & i : se.vars) + if (!hasPrefix(st[i.first], "__")) + std::cout << st[i.first] << " "; + std::cout << ANSI_NORMAL; + std::cout << std::endl; + printWithBindings(st, env); // probably nothing there for the top level. 
+ std::cout << std::endl; + + } +} + +void printEnvBindings(const EvalState &es, const Expr & expr, const Env & env) +{ + // just print the names for now + auto se = es.getStaticEnv(expr); + if (se) + printEnvBindings(es.symbols, *se, env, 0); +} + +void mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & env, ValMap & vm) +{ + // add bindings for the next level up first, so that the bindings for this level + // override the higher levels. + // The top level bindings (builtins) are skipped since they are added for us by initEnv() + if (env.up && se.up) { + mapStaticEnvBindings(st, *se.up, *env.up, vm); + + if (env.type == Env::HasWithAttrs) { + // add 'with' bindings. + Bindings::iterator j = env.values[0]->attrs->begin(); + while (j != env.values[0]->attrs->end()) { + vm[st[j->name]] = j->value; + ++j; + } + } else { + // iterate through staticenv bindings and add them. + for (auto & i : se.vars) + vm[st[i.first]] = env.values[i.second]; + } + } +} + +std::unique_ptr<ValMap> mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & env) +{ + auto vm = std::make_unique<ValMap>(); + mapStaticEnvBindings(st, se, env, *vm); + return vm; +} + +void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr & expr) +{ + // double check we've got the debugRepl function pointer. + if (!debugRepl) + return; + + auto dts = + error && expr.getPos() + ? std::make_unique<DebugTraceStacker>( + *this, + DebugTrace { + .pos = error->info().errPos ? *error->info().errPos : positions[expr.getPos()], + .expr = expr, + .env = env, + .hint = error->info().msg, + .isError = true + }) + : nullptr; + + if (error) + { + printError("%s\n\n", error->what()); + + if (trylevel > 0 && error->info().level != lvlInfo) + printError("This exception occurred in a 'tryEval' call. Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL " to skip these.\n"); + + printError(ANSI_BOLD "Starting REPL to allow you to inspect the current state of the evaluator.\n" ANSI_NORMAL); + } + + auto se = getStaticEnv(expr); + if (se) { + auto vm = mapStaticEnvBindings(symbols, *se.get(), env); + (debugRepl)(ref<EvalState>(shared_from_this()), *vm); + } +} + /* Every "format" object (even temporary) takes up a few hundred bytes of stack space, which is a real killer in the recursive evaluator. So here are some helper functions for throwing exceptions. 
*/ +void EvalState::throwEvalError(const PosIdx pos, const char * s, Env & env, Expr & expr) +{ + debugThrow(EvalError({ + .msg = hintfmt(s), + .errPos = positions[pos] + }), env, expr); +} -LocalNoInlineNoReturn(void throwEvalError(const char * s, const std::string & s2)) +void EvalState::throwEvalError(const PosIdx pos, const char * s) { - throw EvalError(s, s2); + debugThrowLastTrace(EvalError({ + .msg = hintfmt(s), + .errPos = positions[pos] + })); } -LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const std::string & s2)) +void EvalState::throwEvalError(const char * s, const std::string & s2) { - throw EvalError({ + debugThrowLastTrace(EvalError(s, s2)); +} + +void EvalState::throwEvalError(const PosIdx pos, const Suggestions & suggestions, const char * s, + const std::string & s2, Env & env, Expr & expr) +{ + debugThrow(EvalError(ErrorInfo{ .msg = hintfmt(s, s2), - .errPos = pos - }); + .errPos = positions[pos], + .suggestions = suggestions, + }), env, expr); } -LocalNoInlineNoReturn(void throwEvalError(const char * s, const std::string & s2, const std::string & s3)) +void EvalState::throwEvalError(const PosIdx pos, const char * s, const std::string & s2) { - throw EvalError(s, s2, s3); + debugThrowLastTrace(EvalError({ + .msg = hintfmt(s, s2), + .errPos = positions[pos] + })); } -LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const std::string & s2, const std::string & s3)) +void EvalState::throwEvalError(const PosIdx pos, const char * s, const std::string & s2, Env & env, Expr & expr) { - throw EvalError({ - .msg = hintfmt(s, s2, s3), - .errPos = pos - }); + debugThrow(EvalError({ + .msg = hintfmt(s, s2), + .errPos = positions[pos] + }), env, expr); +} + +void EvalState::throwEvalError(const char * s, const std::string & s2, + const std::string & s3) +{ + debugThrowLastTrace(EvalError({ + .msg = hintfmt(s, s2), + .errPos = positions[noPos] + })); +} + +void EvalState::throwEvalError(const PosIdx pos, const char * s, const std::string & s2, + const std::string & s3) +{ + debugThrowLastTrace(EvalError({ + .msg = hintfmt(s, s2), + .errPos = positions[pos] + })); } -LocalNoInlineNoReturn(void throwEvalError(const Pos & p1, const char * s, const Symbol & sym, const Pos & p2)) +void EvalState::throwEvalError(const PosIdx pos, const char * s, const std::string & s2, + const std::string & s3, Env & env, Expr & expr) +{ + debugThrow(EvalError({ + .msg = hintfmt(s, s2), + .errPos = positions[pos] + }), env, expr); +} + +void EvalState::throwEvalError(const PosIdx p1, const char * s, const Symbol sym, const PosIdx p2, Env & env, Expr & expr) { // p1 is where the error occurred; p2 is a position mentioned in the message. 
- throw EvalError({ - .msg = hintfmt(s, sym, p2), - .errPos = p1 - }); + debugThrow(EvalError({ + .msg = hintfmt(s, symbols[sym], positions[p2]), + .errPos = positions[p1] + }), env, expr); } -LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s)) +void EvalState::throwTypeError(const PosIdx pos, const char * s, const Value & v) { - throw TypeError({ + debugThrowLastTrace(TypeError({ + .msg = hintfmt(s, showType(v)), + .errPos = positions[pos] + })); +} + +void EvalState::throwTypeError(const PosIdx pos, const char * s, const Value & v, Env & env, Expr & expr) +{ + debugThrow(TypeError({ + .msg = hintfmt(s, showType(v)), + .errPos = positions[pos] + }), env, expr); +} + +void EvalState::throwTypeError(const PosIdx pos, const char * s) +{ + debugThrowLastTrace(TypeError({ .msg = hintfmt(s), - .errPos = pos - }); + .errPos = positions[pos] + })); } -LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const ExprLambda & fun, const Symbol & s2)) +void EvalState::throwTypeError(const PosIdx pos, const char * s, const ExprLambda & fun, + const Symbol s2, Env & env, Expr &expr) { - throw TypeError({ - .msg = hintfmt(s, fun.showNamePos(), s2), - .errPos = pos - }); + debugThrow(TypeError({ + .msg = hintfmt(s, fun.showNamePos(*this), symbols[s2]), + .errPos = positions[pos] + }), env, expr); } -LocalNoInlineNoReturn(void throwTypeError(const char * s, const Value & v)) +void EvalState::throwTypeError(const PosIdx pos, const Suggestions & suggestions, const char * s, + const ExprLambda & fun, const Symbol s2, Env & env, Expr &expr) { - throw TypeError(s, showType(v)); + debugThrow(TypeError(ErrorInfo { + .msg = hintfmt(s, fun.showNamePos(*this), symbols[s2]), + .errPos = positions[pos], + .suggestions = suggestions, + }), env, expr); } -LocalNoInlineNoReturn(void throwAssertionError(const Pos & pos, const char * s, const std::string & s1)) +void EvalState::throwTypeError(const char * s, const Value & v, Env & env, Expr &expr) { - throw AssertionError({ + debugThrow(TypeError({ + .msg = hintfmt(s, showType(v)), + .errPos = positions[expr.getPos()], + }), env, expr); +} + +void EvalState::throwAssertionError(const PosIdx pos, const char * s, const std::string & s1, Env & env, Expr &expr) +{ + debugThrow(AssertionError({ .msg = hintfmt(s, s1), - .errPos = pos - }); + .errPos = positions[pos] + }), env, expr); } -LocalNoInlineNoReturn(void throwUndefinedVarError(const Pos & pos, const char * s, const std::string & s1)) +void EvalState::throwUndefinedVarError(const PosIdx pos, const char * s, const std::string & s1, Env & env, Expr &expr) { - throw UndefinedVarError({ + debugThrow(UndefinedVarError({ .msg = hintfmt(s, s1), - .errPos = pos - }); + .errPos = positions[pos] + }), env, expr); } -LocalNoInlineNoReturn(void throwMissingArgumentError(const Pos & pos, const char * s, const std::string & s1)) +void EvalState::throwMissingArgumentError(const PosIdx pos, const char * s, const std::string & s1, Env & env, Expr &expr) { - throw MissingArgumentError({ + debugThrow(MissingArgumentError({ .msg = hintfmt(s, s1), - .errPos = pos - }); + .errPos = positions[pos] + }), env, expr); } -LocalNoInline(void addErrorTrace(Error & e, const char * s, const std::string & s2)) +void EvalState::addErrorTrace(Error & e, const char * s, const std::string & s2) const { e.addTrace(std::nullopt, s, s2); } -LocalNoInline(void addErrorTrace(Error & e, const Pos & pos, const char * s, const std::string & s2)) +void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * 
s, const std::string & s2) const { - e.addTrace(pos, s, s2); + e.addTrace(positions[pos], s, s2); } +static std::unique_ptr<DebugTraceStacker> makeDebugTraceStacker( + EvalState & state, + Expr & expr, + Env & env, + std::optional<ErrPos> pos, + const char * s, + const std::string & s2) +{ + return std::make_unique<DebugTraceStacker>(state, + DebugTrace { + .pos = pos, + .expr = expr, + .env = env, + .hint = hintfmt(s, s2), + .isError = false + }); +} + +DebugTraceStacker::DebugTraceStacker(EvalState & evalState, DebugTrace t) + : evalState(evalState) + , trace(std::move(t)) +{ + evalState.debugTraces.push_front(trace); + if (evalState.debugStop && evalState.debugRepl) + evalState.runDebugRepl(nullptr, trace.env, trace.expr); +} void Value::mkString(std::string_view s) { - mkString(dupStringWithLen(s.data(), s.size())); + mkString(makeImmutableString(s)); } @@ -827,7 +1081,7 @@ void Value::mkStringMove(const char * s, const PathSet & context) void Value::mkPath(std::string_view s) { - mkPath(dupStringWithLen(s.data(), s.size())); + mkPath(makeImmutableString(s)); } @@ -847,52 +1101,15 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval) } Bindings::iterator j = env->values[0]->attrs->find(var.name); if (j != env->values[0]->attrs->end()) { - if (countCalls) attrSelects[*j->pos]++; + if (countCalls) attrSelects[j->pos]++; return j->value; } if (!env->prevWith) - throwUndefinedVarError(var.pos, "undefined variable '%1%'", var.name); + throwUndefinedVarError(var.pos, "undefined variable '%1%'", symbols[var.name], *env, const_cast<ExprVar&>(var)); for (size_t l = env->prevWith; l; --l, env = env->up) ; } } - -Value * EvalState::allocValue() -{ - /* We use the boehm batch allocator to speed up allocations of Values (of which there are many). - GC_malloc_many returns a linked list of objects of the given size, where the first word - of each object is also the pointer to the next object in the list. This also means that we - have to explicitly clear the first word of every object we take. */ - if (!*valueAllocCache) { - *valueAllocCache = GC_malloc_many(sizeof(Value)); - if (!*valueAllocCache) throw std::bad_alloc(); - } - - /* GC_NEXT is a convenience macro for accessing the first word of an object. - Take the first list item, advance the list to the next item, and clear the next pointer. */ - void * p = *valueAllocCache; - GC_PTR_STORE_AND_DIRTY(&*valueAllocCache, GC_NEXT(p)); - GC_NEXT(p) = nullptr; - - nrValues++; - auto v = (Value *) p; - return v; -} - - -Env & EvalState::allocEnv(size_t size) -{ - nrEnvs++; - nrValuesInEnvs += size; - Env * env = (Env *) allocBytes(sizeof(Env) + size * sizeof(Value *)); - env->type = Env::Plain; - - /* We assume that env->values has been cleared by the allocator; maybeThunk() and lookupVar fromWith expect this. 
*/ - - return *env; -} - - void EvalState::mkList(Value & v, size_t size) { v.mkList(size); @@ -917,13 +1134,14 @@ void EvalState::mkThunk_(Value & v, Expr * expr) } -void EvalState::mkPos(Value & v, ptr<Pos> pos) +void EvalState::mkPos(Value & v, PosIdx p) { - if (pos->file.set()) { + auto pos = positions[p]; + if (!pos.file.empty()) { auto attrs = buildBindings(3); - attrs.alloc(sFile).mkString(pos->file); - attrs.alloc(sLine).mkInt(pos->line); - attrs.alloc(sColumn).mkInt(pos->column); + attrs.alloc(sFile).mkString(pos.file); + attrs.alloc(sLine).mkInt(pos.line); + attrs.alloc(sColumn).mkInt(pos.column); v.mkAttrs(attrs); } else v.mkNull(); @@ -1024,6 +1242,15 @@ void EvalState::cacheFile( fileParseCache[resolvedPath] = e; try { + auto dts = debugRepl + ? makeDebugTraceStacker( + *this, + *e, + this->baseEnv, + e->getPos() ? std::optional(ErrPos(positions[e->getPos()])) : std::nullopt, + "while evaluating the file '%1%':", resolvedPath) + : nullptr; + // Enforce that 'flake.nix' is a direct attrset, not a // computation. if (mustBeTrivial && @@ -1051,17 +1278,17 @@ inline bool EvalState::evalBool(Env & env, Expr * e) Value v; e->eval(*this, env, v); if (v.type() != nBool) - throwTypeError("value is %1% while a Boolean was expected", v); + throwTypeError(noPos, "value is %1% while a Boolean was expected", v, env, *e); return v.boolean; } -inline bool EvalState::evalBool(Env & env, Expr * e, const Pos & pos) +inline bool EvalState::evalBool(Env & env, Expr * e, const PosIdx pos) { Value v; e->eval(*this, env, v); if (v.type() != nBool) - throwTypeError(pos, "value is %1% while a Boolean was expected", v); + throwTypeError(pos, "value is %1% while a Boolean was expected", v, env, *e); return v.boolean; } @@ -1070,7 +1297,7 @@ inline void EvalState::evalAttrs(Env & env, Expr * e, Value & v) { e->eval(*this, env, v); if (v.type() != nAttrs) - throwTypeError("value is %1% while a set was expected", v); + throwTypeError(noPos, "value is %1% while a set was expected", v, env, *e); } @@ -1130,7 +1357,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) } else vAttr = i.second.e->maybeThunk(state, i.second.inherited ? env : env2); env2.values[displ++] = vAttr; - v.attrs->push_back(Attr(i.first, vAttr, ptr(&i.second.pos))); + v.attrs->push_back(Attr(i.first, vAttr, i.second.pos)); } /* If the rec contains an attribute called `__overrides', then @@ -1162,7 +1389,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) else for (auto & i : attrs) - v.attrs->push_back(Attr(i.first, i.second.e->maybeThunk(state, env), ptr(&i.second.pos))); + v.attrs->push_back(Attr(i.first, i.second.e->maybeThunk(state, env), i.second.pos)); /* Dynamic attrs apply *after* rec and __overrides. 
*/ for (auto & i : dynamicAttrs) { @@ -1172,18 +1399,18 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) if (nameVal.type() == nNull) continue; state.forceStringNoCtx(nameVal); - Symbol nameSym = state.symbols.create(nameVal.string.s); + auto nameSym = state.symbols.create(nameVal.string.s); Bindings::iterator j = v.attrs->find(nameSym); if (j != v.attrs->end()) - throwEvalError(i.pos, "dynamic attribute '%1%' already defined at %2%", nameSym, *j->pos); + state.throwEvalError(i.pos, "dynamic attribute '%1%' already defined at %2%", nameSym, j->pos, env, *this); i.valueExpr->setName(nameSym); /* Keep sorted order so find can catch duplicates */ - v.attrs->push_back(Attr(nameSym, i.valueExpr->maybeThunk(state, *dynamicEnv), ptr(&i.pos))); + v.attrs->push_back(Attr(nameSym, i.valueExpr->maybeThunk(state, *dynamicEnv), i.pos)); v.attrs->sort(); // FIXME: inefficient } - v.attrs->pos = ptr(&pos); + v.attrs->pos = pos; } @@ -1228,10 +1455,12 @@ static std::string showAttrPath(EvalState & state, Env & env, const AttrPath & a for (auto & i : attrPath) { if (!first) out << '.'; else first = false; try { - out << getName(i, state, env); + out << state.symbols[getName(i, state, env)]; } catch (Error & e) { - assert(!i.symbol.set()); - out << "\"${" << *i.expr << "}\""; + assert(!i.symbol); + out << "\"${"; + i.expr->show(state.symbols, out); + out << "}\""; } } return out.str(); @@ -1241,17 +1470,26 @@ static std::string showAttrPath(EvalState & state, Env & env, const AttrPath & a void ExprSelect::eval(EvalState & state, Env & env, Value & v) { Value vTmp; - ptr<Pos> pos2(&noPos); + PosIdx pos2; Value * vAttrs = &vTmp; e->eval(state, env, vTmp); try { + auto dts = state.debugRepl + ? makeDebugTraceStacker( + state, + *this, + env, + state.positions[pos2], + "while evaluating the attribute '%1%'", + showAttrPath(state, env, attrPath)) + : nullptr; for (auto & i : attrPath) { state.nrLookups++; Bindings::iterator j; - Symbol name = getName(i, state, env); + auto name = getName(i, state, env); if (def) { state.forceValue(*vAttrs, pos); if (vAttrs->type() != nAttrs || @@ -1262,19 +1500,27 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v) } } else { state.forceAttrs(*vAttrs, pos); - if ((j = vAttrs->attrs->find(name)) == vAttrs->attrs->end()) - throwEvalError(pos, "attribute '%1%' missing", name); + if ((j = vAttrs->attrs->find(name)) == vAttrs->attrs->end()) { + std::set<std::string> allAttrNames; + for (auto & attr : *vAttrs->attrs) + allAttrNames.insert(state.symbols[attr.name]); + state.throwEvalError( + pos, + Suggestions::bestMatches(allAttrNames, state.symbols[name]), + "attribute '%1%' missing", state.symbols[name], env, *this); + } } vAttrs = j->value; pos2 = j->pos; - if (state.countCalls) state.attrSelects[*pos2]++; + if (state.countCalls) state.attrSelects[pos2]++; } - state.forceValue(*vAttrs, (*pos2 != noPos ? *pos2 : this->pos ) ); + state.forceValue(*vAttrs, (pos2 ? 
pos2 : this->pos ) ); } catch (Error & e) { - if (*pos2 != noPos && pos2->file != state.sDerivationNix) - addErrorTrace(e, *pos2, "while evaluating the attribute '%1%'", + auto pos2r = state.positions[pos2]; + if (pos2 && pos2r.file != state.derivationNixPath) + state.addErrorTrace(e, pos2, "while evaluating the attribute '%1%'", showAttrPath(state, env, attrPath)); throw; } @@ -1293,7 +1539,7 @@ void ExprOpHasAttr::eval(EvalState & state, Env & env, Value & v) for (auto & i : attrPath) { state.forceValue(*vAttrs, noPos); Bindings::iterator j; - Symbol name = getName(i, state, env); + auto name = getName(i, state, env); if (vAttrs->type() != nAttrs || (j = vAttrs->attrs->find(name)) == vAttrs->attrs->end()) { @@ -1314,9 +1560,11 @@ void ExprLambda::eval(EvalState & state, Env & env, Value & v) } -void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const Pos & pos) +void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos) { - auto trace = evalSettings.traceFunctionCalls ? std::make_unique<FunctionCallTrace>(pos) : nullptr; + auto trace = evalSettings.traceFunctionCalls + ? std::make_unique<FunctionCallTrace>(positions[pos]) + : nullptr; forceValue(fun, pos); @@ -1341,7 +1589,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & ExprLambda & lambda(*vCur.lambda.fun); auto size = - (lambda.arg.empty() ? 0 : 1) + + (!lambda.arg ? 0 : 1) + (lambda.hasFormals() ? lambda.formals->formals.size() : 0); Env & env2(allocEnv(size)); env2.up = vCur.lambda.env; @@ -1350,11 +1598,10 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & if (!lambda.hasFormals()) env2.values[displ++] = args[0]; - else { forceAttrs(*args[0], pos); - if (!lambda.arg.empty()) + if (lambda.arg) env2.values[displ++] = args[0]; /* For each formal argument, get the actual argument. If @@ -1365,7 +1612,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & auto j = args[0]->attrs->get(i.name); if (!j) { if (!i.def) throwTypeError(pos, "%1% called without required argument '%2%'", - lambda, i.name); + lambda, i.name, *fun.lambda.env, lambda); env2.values[displ++] = i.def->maybeThunk(*this, env2); } else { attrsUsed++; @@ -1379,8 +1626,16 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & /* Nope, so show the first unexpected argument to the user. */ for (auto & i : *args[0]->attrs) - if (!lambda.formals->has(i.name)) - throwTypeError(pos, "%1% called with unexpected argument '%2%'", lambda, i.name); + if (!lambda.formals->has(i.name)) { + std::set<std::string> formalNames; + for (auto & formal : lambda.formals->formals) + formalNames.insert(symbols[formal.name]); + throwTypeError( + pos, + Suggestions::bestMatches(formalNames, symbols[i.name]), + "%1% called with unexpected argument '%2%'", + lambda, i.name, *fun.lambda.env, lambda); + } abort(); // can't happen } } @@ -1390,12 +1645,21 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & /* Evaluate the body. */ try { + auto dts = debugRepl + ? makeDebugTraceStacker( + *this, *lambda.body, env2, positions[lambda.pos], + "while evaluating %s", + lambda.name + ? concatStrings("'", symbols[lambda.name], "'") + : "anonymous lambda") + : nullptr; + lambda.body->eval(*this, env2, vCur); } catch (Error & e) { if (loggerSettings.showTrace.get()) { addErrorTrace(e, lambda.pos, "while evaluating %s", - (lambda.name.set() - ? 
"'" + (const std::string &) lambda.name + "'" + (lambda.name + ? concatStrings("'", symbols[lambda.name], "'") : "anonymous lambda")); addErrorTrace(e, pos, "from call site%s", ""); } @@ -1544,8 +1808,8 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res) Nix attempted to evaluate a function as a top level expression; in this case it must have its arguments supplied either by default values, or passed explicitly with '--arg' or '--argstr'. See -https://nixos.org/manual/nix/stable/#ss-functions.)", i.name); - +https://nixos.org/manual/nix/stable/expressions/language-constructs.html#functions.)", symbols[i.name], + *fun.lambda.env, *fun.lambda.fun); } } } @@ -1576,8 +1840,8 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v) { if (!state.evalBool(env, cond, pos)) { std::ostringstream out; - cond->show(out); - throwAssertionError(pos, "assertion '%1%' failed", out.str()); + cond->show(state.symbols, out); + state.throwAssertionError(pos, "assertion '%1%' failed", out.str(), env, *this); } body->eval(state, env, v); } @@ -1670,7 +1934,7 @@ void ExprOpConcatLists::eval(EvalState & state, Env & env, Value & v) } -void EvalState::concatLists(Value & v, size_t nrLists, Value * * lists, const Pos & pos) +void EvalState::concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos) { nrListConcats++; @@ -1754,14 +2018,14 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) nf = n; nf += vTmp.fpoint; } else - throwEvalError(i_pos, "cannot add %1% to an integer", showType(vTmp)); + state.throwEvalError(i_pos, "cannot add %1% to an integer", showType(vTmp), env, *this); } else if (firstType == nFloat) { if (vTmp.type() == nInt) { nf += vTmp.integer; } else if (vTmp.type() == nFloat) { nf += vTmp.fpoint; } else - throwEvalError(i_pos, "cannot add %1% to a float", showType(vTmp)); + state.throwEvalError(i_pos, "cannot add %1% to a float", showType(vTmp), env, *this); } else { if (s.empty()) s.reserve(es->size()); /* skip canonization of first path, which would only be not @@ -1781,7 +2045,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) v.mkFloat(nf); else if (firstType == nPath) { if (!context.empty()) - throwEvalError(pos, "a string that refers to a store path cannot be appended to a path"); + state.throwEvalError(pos, "a string that refers to a store path cannot be appended to a path", env, *this); v.mkPath(canonPath(str())); } else v.mkStringMove(c_str(), context); @@ -1790,7 +2054,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) void ExprPos::eval(EvalState & state, Env & env, Value & v) { - state.mkPos(v, ptr(&pos)); + state.mkPos(v, pos); } @@ -1808,9 +2072,15 @@ void EvalState::forceValueDeep(Value & v) if (v.type() == nAttrs) { for (auto & i : *v.attrs) try { + // If the value is a thunk, we're evaling. Otherwise no trace necessary. + auto dts = debugRepl && i.value->isThunk() + ? 
makeDebugTraceStacker(*this, *i.value->thunk.expr, *i.value->thunk.env, positions[i.pos], + "while evaluating the attribute '%1%'", symbols[i.name]) + : nullptr; + recurse(*i.value); } catch (Error & e) { - addErrorTrace(e, *i.pos, "while evaluating the attribute '%1%'", i.name); + addErrorTrace(e, i.pos, "while evaluating the attribute '%1%'", symbols[i.name]); throw; } } @@ -1825,16 +2095,17 @@ void EvalState::forceValueDeep(Value & v) } -NixInt EvalState::forceInt(Value & v, const Pos & pos) +NixInt EvalState::forceInt(Value & v, const PosIdx pos) { forceValue(v, pos); if (v.type() != nInt) throwTypeError(pos, "value is %1% while an integer was expected", v); + return v.integer; } -NixFloat EvalState::forceFloat(Value & v, const Pos & pos) +NixFloat EvalState::forceFloat(Value & v, const PosIdx pos) { forceValue(v, pos); if (v.type() == nInt) @@ -1845,7 +2116,7 @@ NixFloat EvalState::forceFloat(Value & v, const Pos & pos) } -bool EvalState::forceBool(Value & v, const Pos & pos) +bool EvalState::forceBool(Value & v, const PosIdx pos) { forceValue(v, pos); if (v.type() != nBool) @@ -1860,7 +2131,7 @@ bool EvalState::isFunctor(Value & fun) } -void EvalState::forceFunction(Value & v, const Pos & pos) +void EvalState::forceFunction(Value & v, const PosIdx pos) { forceValue(v, pos); if (v.type() != nFunction && !isFunctor(v)) @@ -1868,14 +2139,11 @@ void EvalState::forceFunction(Value & v, const Pos & pos) } -std::string_view EvalState::forceString(Value & v, const Pos & pos) +std::string_view EvalState::forceString(Value & v, const PosIdx pos) { forceValue(v, pos); if (v.type() != nString) { - if (pos) - throwTypeError(pos, "value is %1% while a string was expected", v); - else - throwTypeError("value is %1% while a string was expected", v); + throwTypeError(pos, "value is %1% while a string was expected", v); } return v.string.s; } @@ -1883,13 +2151,22 @@ std::string_view EvalState::forceString(Value & v, const Pos & pos) /* Decode a context string ‘!<name>!<path>’ into a pair <path, name>. */ -std::pair<std::string, std::string> decodeContext(std::string_view s) +NixStringContextElem decodeContext(const Store & store, std::string_view s) { if (s.at(0) == '!') { size_t index = s.find("!", 1); - return {std::string(s.substr(index + 1)), std::string(s.substr(1, index - 1))}; + return { + store.parseStorePath(s.substr(index + 1)), + std::string(s.substr(1, index - 1)), + }; } else - return {s.at(0) == '/' ? std::string(s) : std::string(s.substr(1)), ""}; + return { + store.parseStorePath( + s.at(0) == '/' + ? 
s + : s.substr(1)), + "", + }; } @@ -1901,18 +2178,18 @@ void copyContext(const Value & v, PathSet & context) } -std::vector<std::pair<Path, std::string>> Value::getContext() +NixStringContext Value::getContext(const Store & store) { - std::vector<std::pair<Path, std::string>> res; + NixStringContext res; assert(internalType == tString); if (string.context) for (const char * * p = string.context; *p; ++p) - res.push_back(decodeContext(*p)); + res.push_back(decodeContext(store, *p)); return res; } -std::string_view EvalState::forceString(Value & v, PathSet & context, const Pos & pos) +std::string_view EvalState::forceString(Value & v, PathSet & context, const PosIdx pos) { auto s = forceString(v, pos); copyContext(v, context); @@ -1920,7 +2197,7 @@ std::string_view EvalState::forceString(Value & v, PathSet & context, const Pos } -std::string_view EvalState::forceStringNoCtx(Value & v, const Pos & pos) +std::string_view EvalState::forceStringNoCtx(Value & v, const PosIdx pos) { auto s = forceString(v, pos); if (v.string.context) { @@ -1940,13 +2217,13 @@ bool EvalState::isDerivation(Value & v) if (v.type() != nAttrs) return false; Bindings::iterator i = v.attrs->find(sType); if (i == v.attrs->end()) return false; - forceValue(*i->value, *i->pos); + forceValue(*i->value, i->pos); if (i->value->type() != nString) return false; return strcmp(i->value->string.s, "derivation") == 0; } -std::optional<std::string> EvalState::tryAttrsToString(const Pos & pos, Value & v, +std::optional<std::string> EvalState::tryAttrsToString(const PosIdx pos, Value & v, PathSet & context, bool coerceMore, bool copyToStore) { auto i = v.attrs->find(sToString); @@ -1959,7 +2236,7 @@ std::optional<std::string> EvalState::tryAttrsToString(const Pos & pos, Value & return {}; } -BackedStringView EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context, +BackedStringView EvalState::coerceToString(const PosIdx pos, Value & v, PathSet & context, bool coerceMore, bool copyToStore, bool canonicalizePath) { forceValue(v, pos); @@ -1983,15 +2260,15 @@ BackedStringView EvalState::coerceToString(const Pos & pos, Value & v, PathSet & if (maybeString) return std::move(*maybeString); auto i = v.attrs->find(sOutPath); - if (i == v.attrs->end()) throwTypeError(pos, "cannot coerce a set to a string"); + if (i == v.attrs->end()) + throwTypeError(pos, "cannot coerce a set to a string"); return coerceToString(pos, *i->value, context, coerceMore, copyToStore); } if (v.type() == nExternal) - return v.external->coerceToString(pos, context, coerceMore, copyToStore); + return v.external->coerceToString(positions[pos], context, coerceMore, copyToStore); if (coerceMore) { - /* Note that `false' is represented as an empty string for shell scripting convenience, just like `null'. 
*/ if (v.type() == nBool && v.boolean) return "1"; @@ -2041,7 +2318,7 @@ std::string EvalState::copyPathToStore(PathSet & context, const Path & path) } -Path EvalState::coerceToPath(const Pos & pos, Value & v, PathSet & context) +Path EvalState::coerceToPath(const PosIdx pos, Value & v, PathSet & context) { auto path = coerceToString(pos, v, context, false, false).toOwned(); if (path == "" || path[0] != '/') @@ -2050,6 +2327,18 @@ Path EvalState::coerceToPath(const Pos & pos, Value & v, PathSet & context) } +StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, PathSet & context) +{ + auto path = coerceToString(pos, v, context, false, false).toOwned(); + if (auto storePath = store->maybeParseStorePath(path)) + return *storePath; + throw EvalError({ + .msg = hintfmt("path '%1%' is not in the Nix store", path), + .errPos = positions[pos] + }); +} + + bool EvalState::eqValues(Value & v1, Value & v2) { forceValue(v1, noPos); @@ -2124,7 +2413,9 @@ bool EvalState::eqValues(Value & v1, Value & v2) return v1.fpoint == v2.fpoint; default: - throwEvalError("cannot compare %1% with %2%", showType(v1), showType(v2)); + throwEvalError("cannot compare %1% with %2%", + showType(v1), + showType(v2)); } } @@ -2210,28 +2501,28 @@ void EvalState::printStats() } { auto list = topObj.list("functions"); - for (auto & i : functionCalls) { + for (auto & [fun, count] : functionCalls) { auto obj = list.object(); - if (i.first->name.set()) - obj.attr("name", (const std::string &) i.first->name); + if (fun->name) + obj.attr("name", (std::string_view) symbols[fun->name]); else obj.attr("name", nullptr); - if (i.first->pos) { - obj.attr("file", (const std::string &) i.first->pos.file); - obj.attr("line", i.first->pos.line); - obj.attr("column", i.first->pos.column); + if (auto pos = positions[fun->pos]) { + obj.attr("file", (std::string_view) pos.file); + obj.attr("line", pos.line); + obj.attr("column", pos.column); } - obj.attr("count", i.second); + obj.attr("count", count); } } { auto list = topObj.list("attributes"); for (auto & i : attrSelects) { auto obj = list.object(); - if (i.first) { - obj.attr("file", (const std::string &) i.first.file); - obj.attr("line", i.first.line); - obj.attr("column", i.first.column); + if (auto pos = positions[i.first]) { + obj.attr("file", (const std::string &) pos.file); + obj.attr("line", pos.line); + obj.attr("column", pos.column); } obj.attr("count", i.second); } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 1f0e97b2e..f07f15d43 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -13,7 +13,6 @@ #include <unordered_map> #include <mutex> - namespace nix { @@ -23,18 +22,22 @@ class StorePath; enum RepairFlag : bool; -typedef void (* PrimOpFun) (EvalState & state, const Pos & pos, Value * * args, Value & v); - +typedef void (* PrimOpFun) (EvalState & state, const PosIdx pos, Value * * args, Value & v); struct PrimOp { PrimOpFun fun; size_t arity; - Symbol name; + std::string name; std::vector<std::string> args; const char * doc = nullptr; }; +#if HAVE_BOEHMGC + typedef std::map<std::string, Value *, std::less<std::string>, traceable_allocator<std::pair<const std::string, Value *> > > ValMap; +#else + typedef std::map<std::string, Value *> ValMap; +#endif struct Env { @@ -44,6 +47,10 @@ struct Env Value * values[0]; }; +void printEnvBindings(const EvalState &es, const Expr & expr, const Env & env); +void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & env, int lvl = 0); + +std::unique_ptr<ValMap> 
mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & env); void copyContext(const Value & v, PathSet & context); @@ -53,7 +60,9 @@ void copyContext(const Value & v, PathSet & context); typedef std::map<Path, StorePath> SrcToStore; -std::ostream & operator << (std::ostream & str, const Value & v); +std::ostream & printValue(const EvalState & state, std::ostream & str, const Value & v); +std::string printValue(const EvalState & state, const Value & v); +std::ostream & operator << (std::ostream & os, const ValueType t); typedef std::pair<std::string, std::string> SearchPathElem; @@ -68,21 +77,34 @@ struct RegexCache; std::shared_ptr<RegexCache> makeRegexCache(); +struct DebugTrace { + std::optional<ErrPos> pos; + const Expr & expr; + const Env & env; + hintformat hint; + bool isError; +}; + +void debugError(Error * e, Env & env, Expr & expr); -class EvalState +class EvalState : public std::enable_shared_from_this<EvalState> { public: SymbolTable symbols; + PosTable positions; + + static inline std::string derivationNixPath = "//builtin/derivation.nix"; const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue, sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls, sFile, sLine, sColumn, sFunctor, sToString, sRight, sWrong, sStructuredAttrs, sBuilder, sArgs, - sContentAddressed, + sContentAddressed, sImpure, sOutputHash, sOutputHashAlgo, sOutputHashMode, sRecurseForDerivations, sDescription, sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath, - sPrefix; + sPrefix, + sOutputSpecified; Symbol sDerivationNix; /* If set, force copying files to the Nix store even if they @@ -104,12 +126,56 @@ public: RootValue vCallFlake = nullptr; RootValue vImportedDrvToDerivation = nullptr; + /* Debugger */ + void (* debugRepl)(ref<EvalState> es, const ValMap & extraEnv); + bool debugStop; + bool debugQuit; + int trylevel; + std::list<DebugTrace> debugTraces; + std::map<const Expr*, const std::shared_ptr<const StaticEnv>> exprEnvs; + const std::shared_ptr<const StaticEnv> getStaticEnv(const Expr & expr) const + { + auto i = exprEnvs.find(&expr); + if (i != exprEnvs.end()) + return i->second; + else + return std::shared_ptr<const StaticEnv>();; + } + + void runDebugRepl(const Error * error, const Env & env, const Expr & expr); + + template<class E> + [[gnu::noinline, gnu::noreturn]] + void debugThrow(E && error, const Env & env, const Expr & expr) + { + if (debugRepl) + runDebugRepl(&error, env, expr); + + throw std::move(error); + } + + template<class E> + [[gnu::noinline, gnu::noreturn]] + void debugThrowLastTrace(E && e) + { + // Call this in the situation where Expr and Env are inaccessible. + // The debugger will start in the last context that's in the + // DebugTrace stack. + if (debugRepl && !debugTraces.empty()) { + const DebugTrace & last = debugTraces.front(); + runDebugRepl(&e, last.env, last.expr); + } + + throw std::move(e); + } + + private: SrcToStore srcToStore; /* A cache from path names to parse trees. */ #if HAVE_BOEHMGC - typedef std::map<Path, Expr *, std::less<Path>, traceable_allocator<std::pair<const Path, Expr *> > > FileParseCache; + typedef std::map<Path, Expr *, std::less<Path>, traceable_allocator<std::pair<const Path, Expr *>>> FileParseCache; #else typedef std::map<Path, Expr *> FileParseCache; #endif @@ -117,7 +183,7 @@ private: /* A cache from path names to values. 
*/ #if HAVE_BOEHMGC - typedef std::map<Path, Value, std::less<Path>, traceable_allocator<std::pair<const Path, Value> > > FileEvalCache; + typedef std::map<Path, Value, std::less<Path>, traceable_allocator<std::pair<const Path, Value>>> FileEvalCache; #else typedef std::map<Path, Value> FileEvalCache; #endif @@ -133,9 +199,14 @@ private: /* Cache used by prim_match(). */ std::shared_ptr<RegexCache> regexCache; +#if HAVE_BOEHMGC /* Allocation cache for GC'd Value objects. */ std::shared_ptr<void *> valueAllocCache; + /* Allocation cache for size-1 Env objects. */ + std::shared_ptr<void *> env1AllocCache; +#endif + public: EvalState( @@ -144,12 +215,6 @@ public: std::shared_ptr<Store> buildStore = nullptr); ~EvalState(); - void requireExperimentalFeatureOnEvaluation( - const ExperimentalFeature &, - const std::string_view fName, - const Pos & pos - ); - void addToSearchPath(const std::string & s); SearchPath getSearchPath() { return searchPath; } @@ -161,6 +226,9 @@ public: the real store path if `store` is a chroot store. */ void allowPath(const StorePath & storePath); + /* Allow access to a store path and return it as a string. */ + void allowAndSetStorePathString(const StorePath & storePath, Value & v); + /* Check whether access to a path is allowed and throw an error if not. Otherwise return the canonicalised path. */ Path checkSourcePath(const Path & path); @@ -178,10 +246,10 @@ public: /* Parse a Nix expression from the specified file. */ Expr * parseExprFromFile(const Path & path); - Expr * parseExprFromFile(const Path & path, StaticEnv & staticEnv); + Expr * parseExprFromFile(const Path & path, std::shared_ptr<StaticEnv> & staticEnv); /* Parse a Nix expression from the specified string. */ - Expr * parseExprFromString(std::string s, const Path & basePath, StaticEnv & staticEnv); + Expr * parseExprFromString(std::string s, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv); Expr * parseExprFromString(std::string s, const Path & basePath); Expr * parseStdin(); @@ -191,7 +259,7 @@ public: trivial (i.e. doesn't require arbitrary computation). */ void evalFile(const Path & path, Value & v, bool mustBeTrivial = false); - /* Like `cacheFile`, but with an already parsed expression. */ + /* Like `evalFile`, but with an already parsed expression. */ void cacheFile( const Path & path, const Path & resolvedPath, @@ -203,7 +271,7 @@ public: /* Look up a file in the search path. */ Path findFile(const std::string_view path); - Path findFile(SearchPath & searchPath, const std::string_view path, const Pos & pos = noPos); + Path findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos); /* If the specified search path element is a URI, download it. */ std::pair<bool, std::string> resolveSearchPathElem(const SearchPathElem & elem); @@ -215,14 +283,14 @@ public: /* Evaluation the expression, then verify that it has the expected type. */ inline bool evalBool(Env & env, Expr * e); - inline bool evalBool(Env & env, Expr * e, const Pos & pos); + inline bool evalBool(Env & env, Expr * e, const PosIdx pos); inline void evalAttrs(Env & env, Expr * e, Value & v); /* If `v' is a thunk, enter it and overwrite `v' with the result of the evaluation of the thunk. If `v' is a delayed function application, call the function and overwrite `v' with the result. Otherwise, this is a no-op. 
*/ - inline void forceValue(Value & v, const Pos & pos); + inline void forceValue(Value & v, const PosIdx pos); template <typename Callable> inline void forceValue(Value & v, Callable getPos); @@ -232,33 +300,103 @@ public: void forceValueDeep(Value & v); /* Force `v', and then verify that it has the expected type. */ - NixInt forceInt(Value & v, const Pos & pos); - NixFloat forceFloat(Value & v, const Pos & pos); - bool forceBool(Value & v, const Pos & pos); + NixInt forceInt(Value & v, const PosIdx pos); + NixFloat forceFloat(Value & v, const PosIdx pos); + bool forceBool(Value & v, const PosIdx pos); - void forceAttrs(Value & v, const Pos & pos); + void forceAttrs(Value & v, const PosIdx pos); template <typename Callable> inline void forceAttrs(Value & v, Callable getPos); - inline void forceList(Value & v, const Pos & pos); - void forceFunction(Value & v, const Pos & pos); // either lambda or primop - std::string_view forceString(Value & v, const Pos & pos = noPos); - std::string_view forceString(Value & v, PathSet & context, const Pos & pos = noPos); - std::string_view forceStringNoCtx(Value & v, const Pos & pos = noPos); + inline void forceList(Value & v, const PosIdx pos); + void forceFunction(Value & v, const PosIdx pos); // either lambda or primop + std::string_view forceString(Value & v, const PosIdx pos = noPos); + std::string_view forceString(Value & v, PathSet & context, const PosIdx pos = noPos); + std::string_view forceStringNoCtx(Value & v, const PosIdx pos = noPos); + + [[gnu::noinline, gnu::noreturn]] + void throwEvalError(const PosIdx pos, const char * s); + [[gnu::noinline, gnu::noreturn]] + void throwEvalError(const PosIdx pos, const char * s, + Env & env, Expr & expr); + [[gnu::noinline, gnu::noreturn]] + void throwEvalError(const char * s, const std::string & s2); + [[gnu::noinline, gnu::noreturn]] + void throwEvalError(const PosIdx pos, const char * s, const std::string & s2); + [[gnu::noinline, gnu::noreturn]] + void throwEvalError(const char * s, const std::string & s2, + Env & env, Expr & expr); + [[gnu::noinline, gnu::noreturn]] + void throwEvalError(const PosIdx pos, const char * s, const std::string & s2, + Env & env, Expr & expr); + [[gnu::noinline, gnu::noreturn]] + void throwEvalError(const char * s, const std::string & s2, const std::string & s3, + Env & env, Expr & expr); + [[gnu::noinline, gnu::noreturn]] + void throwEvalError(const PosIdx pos, const char * s, const std::string & s2, const std::string & s3, + Env & env, Expr & expr); + [[gnu::noinline, gnu::noreturn]] + void throwEvalError(const PosIdx pos, const char * s, const std::string & s2, const std::string & s3); + [[gnu::noinline, gnu::noreturn]] + void throwEvalError(const char * s, const std::string & s2, const std::string & s3); + [[gnu::noinline, gnu::noreturn]] + void throwEvalError(const PosIdx pos, const Suggestions & suggestions, const char * s, const std::string & s2, + Env & env, Expr & expr); + [[gnu::noinline, gnu::noreturn]] + void throwEvalError(const PosIdx p1, const char * s, const Symbol sym, const PosIdx p2, + Env & env, Expr & expr); + + [[gnu::noinline, gnu::noreturn]] + void throwTypeError(const PosIdx pos, const char * s, const Value & v); + [[gnu::noinline, gnu::noreturn]] + void throwTypeError(const PosIdx pos, const char * s, const Value & v, + Env & env, Expr & expr); + [[gnu::noinline, gnu::noreturn]] + void throwTypeError(const PosIdx pos, const char * s); + [[gnu::noinline, gnu::noreturn]] + void throwTypeError(const PosIdx pos, const char * s, + Env & env, Expr & 
expr); + [[gnu::noinline, gnu::noreturn]] + void throwTypeError(const PosIdx pos, const char * s, const ExprLambda & fun, const Symbol s2, + Env & env, Expr & expr); + [[gnu::noinline, gnu::noreturn]] + void throwTypeError(const PosIdx pos, const Suggestions & suggestions, const char * s, const ExprLambda & fun, const Symbol s2, + Env & env, Expr & expr); + [[gnu::noinline, gnu::noreturn]] + void throwTypeError(const char * s, const Value & v, + Env & env, Expr & expr); + + [[gnu::noinline, gnu::noreturn]] + void throwAssertionError(const PosIdx pos, const char * s, const std::string & s1, + Env & env, Expr & expr); + + [[gnu::noinline, gnu::noreturn]] + void throwUndefinedVarError(const PosIdx pos, const char * s, const std::string & s1, + Env & env, Expr & expr); + + [[gnu::noinline, gnu::noreturn]] + void throwMissingArgumentError(const PosIdx pos, const char * s, const std::string & s1, + Env & env, Expr & expr); + + [[gnu::noinline]] + void addErrorTrace(Error & e, const char * s, const std::string & s2) const; + [[gnu::noinline]] + void addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2) const; +public: /* Return true iff the value `v' denotes a derivation (i.e. a set with attribute `type = "derivation"'). */ bool isDerivation(Value & v); - std::optional<std::string> tryAttrsToString(const Pos & pos, Value & v, + std::optional<std::string> tryAttrsToString(const PosIdx pos, Value & v, PathSet & context, bool coerceMore = false, bool copyToStore = true); /* String coercion. Converts strings, paths and derivations to a string. If `coerceMore' is set, also converts nulls, integers, booleans and lists to a string. If `copyToStore' is set, referenced paths are copied to the Nix store as a side effect. */ - BackedStringView coerceToString(const Pos & pos, Value & v, PathSet & context, + BackedStringView coerceToString(const PosIdx pos, Value & v, PathSet & context, bool coerceMore = false, bool copyToStore = true, bool canonicalizePath = true); @@ -267,7 +405,10 @@ public: /* Path coercion. Converts strings, paths and derivations to a path. The result is guaranteed to be a canonicalised, absolute path. Nothing is copied to the store. */ - Path coerceToPath(const Pos & pos, Value & v, PathSet & context); + Path coerceToPath(const PosIdx pos, Value & v, PathSet & context); + + /* Like coerceToPath, but the result must be a store path. */ + StorePath coerceToStorePath(const PosIdx pos, Value & v, PathSet & context); public: @@ -276,7 +417,7 @@ public: Env & baseEnv; /* The same, but used during parsing to resolve variables. */ - StaticEnv staticBaseEnv; // !!! should be private + std::shared_ptr<StaticEnv> staticBaseEnv; // !!! 
should be private private: @@ -300,7 +441,7 @@ public: struct Doc { Pos pos; - std::optional<Symbol> name; + std::optional<std::string> name; size_t arity; std::vector<std::string> args; const char * doc; @@ -317,7 +458,7 @@ private: friend struct ExprLet; Expr * parse(char * text, size_t length, FileOrigin origin, const PathView path, - const PathView basePath, StaticEnv & staticEnv); + const PathView basePath, std::shared_ptr<StaticEnv> & staticEnv); public: @@ -328,9 +469,9 @@ public: bool isFunctor(Value & fun); // FIXME: use std::span - void callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const Pos & pos); + void callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos); - void callFunction(Value & fun, Value & arg, Value & vRes, const Pos & pos) + void callFunction(Value & fun, Value & arg, Value & vRes, const PosIdx pos) { Value * args[] = {&arg}; callFunction(fun, 1, args, vRes, pos); @@ -341,10 +482,10 @@ public: void autoCallFunction(Bindings & args, Value & fun, Value & res); /* Allocation primitives. */ - Value * allocValue(); - Env & allocEnv(size_t size); + inline Value * allocValue(); + inline Env & allocEnv(size_t size); - Value * allocAttr(Value & vAttrs, const Symbol & name); + Value * allocAttr(Value & vAttrs, Symbol name); Value * allocAttr(Value & vAttrs, std::string_view name); Bindings * allocBindings(size_t capacity); @@ -356,9 +497,9 @@ public: void mkList(Value & v, size_t length); void mkThunk_(Value & v, Expr * expr); - void mkPos(Value & v, ptr<Pos> pos); + void mkPos(Value & v, PosIdx pos); - void concatLists(Value & v, size_t nrLists, Value * * lists, const Pos & pos); + void concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos); /* Print statistics. */ void printStats(); @@ -386,7 +527,7 @@ private: bool countCalls; - typedef std::map<Symbol, size_t> PrimOpCalls; + typedef std::map<std::string, size_t> PrimOpCalls; PrimOpCalls primOpCalls; typedef std::map<ExprLambda *, size_t> FunctionCalls; @@ -394,7 +535,7 @@ private: void incrFunctionCall(ExprLambda * fun); - typedef std::map<Pos, size_t> AttrSelects; + typedef std::map<PosIdx, size_t> AttrSelects; AttrSelects attrSelects; friend struct ExprOpUpdate; @@ -405,13 +546,23 @@ private: friend struct ExprFloat; friend struct ExprPath; friend struct ExprSelect; - friend void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v); - friend void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v); - friend void prim_split(EvalState & state, const Pos & pos, Value * * args, Value & v); + friend void prim_getAttr(EvalState & state, const PosIdx pos, Value * * args, Value & v); + friend void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v); + friend void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v); friend struct Value; }; +struct DebugTraceStacker { + DebugTraceStacker(EvalState & evalState, DebugTrace t); + ~DebugTraceStacker() + { + // assert(evalState.debugTraces.front() == trace); + evalState.debugTraces.pop_front(); + } + EvalState & evalState; + DebugTrace trace; +}; /* Return a string representing the type of the value `v'. */ std::string_view showType(ValueType type); @@ -419,7 +570,7 @@ std::string showType(const Value & v); /* Decode a context string ‘!<name>!<path>’ into a pair <path, name>. 
*/ -std::pair<std::string, std::string> decodeContext(std::string_view s); +NixStringContextElem decodeContext(const Store & store, std::string_view s); /* If `path' refers to a directory, then append "/default.nix". */ Path resolveExprPath(Path path); @@ -496,6 +647,15 @@ struct EvalSettings : Config Setting<bool> useEvalCache{this, true, "eval-cache", "Whether to use the flake evaluation cache."}; + + Setting<bool> ignoreExceptionsDuringTry{this, false, "ignore-try", + R"( + If set to true, ignore exceptions inside 'tryEval' calls when evaluating nix expressions in + debug mode (using the --debugger flag). By default the debugger will pause on all exceptions. + )"}; + + Setting<bool> traceVerbose{this, false, "trace-verbose", + "Whether `builtins.traceVerbose` should trace its first argument when evaluated."}; }; extern EvalSettings evalSettings; @@ -503,3 +663,5 @@ extern EvalSettings evalSettings; static const std::string corepkgsPrefix{"/__corepkgs__/"}; } + +#include "eval-inline.hh" diff --git a/src/libexpr/fetchurl.nix b/src/libexpr/fetchurl.nix index 02531103b..9d1b61d7f 100644 --- a/src/libexpr/fetchurl.nix +++ b/src/libexpr/fetchurl.nix @@ -12,13 +12,13 @@ , executable ? false , unpack ? false , name ? baseNameOf (toString url) +, impure ? false }: -derivation { +derivation ({ builder = "builtin:fetchurl"; # New-style output content requirements. - inherit outputHashAlgo outputHash; outputHashMode = if unpack || executable then "recursive" else "flat"; inherit name url executable unpack; @@ -38,4 +38,6 @@ derivation { # To make "nix-prefetch-url" work. urls = [ url ]; -} +} // (if impure + then { __impure = true; } + else { inherit outputHashAlgo outputHash; })) diff --git a/src/libexpr/flake/call-flake.nix b/src/libexpr/flake/call-flake.nix index 932ac5e90..8061db3df 100644 --- a/src/libexpr/flake/call-flake.nix +++ b/src/libexpr/flake/call-flake.nix @@ -43,7 +43,7 @@ let outputs = flake.outputs (inputs // { self = result; }); - result = outputs // sourceInfo // { inherit inputs; inherit outputs; inherit sourceInfo; }; + result = outputs // sourceInfo // { inherit inputs; inherit outputs; inherit sourceInfo; _type = "flake"; }; in if node.flake or true then assert builtins.isFunction flake.outputs; diff --git a/src/libexpr/flake/config.cc b/src/libexpr/flake/config.cc index 7ecd61816..6df95f1f0 100644 --- a/src/libexpr/flake/config.cc +++ b/src/libexpr/flake/config.cc @@ -1,5 +1,6 @@ #include "flake.hh" #include "globals.hh" +#include "fetch-settings.hh" #include <nlohmann/json.hpp> @@ -30,7 +31,7 @@ static void writeTrustedList(const TrustedList & trustedList) void ConfigFile::apply() { - std::set<std::string> whitelist{"bash-prompt", "bash-prompt-suffix", "flake-registry"}; + std::set<std::string> whitelist{"bash-prompt", "bash-prompt-prefix", "bash-prompt-suffix", "flake-registry"}; for (auto & [name, value] : settings) { @@ -49,13 +50,11 @@ void ConfigFile::apply() else assert(false); - if (!whitelist.count(baseName)) { - auto trustedList = readTrustedList(); - + if (!whitelist.count(baseName) && !nix::fetchSettings.acceptFlakeConfig) { bool trusted = false; - if (nix::settings.acceptFlakeConfig){ - trusted = true; - } else if (auto saved = get(get(trustedList, name).value_or(std::map<std::string, bool>()), valueS)) { + auto trustedList = readTrustedList(); + auto tlname = get(trustedList, name); + if (auto saved = tlname ? 
get(*tlname, valueS) : nullptr) { trusted = *saved; warn("Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name,valueS); } else { @@ -68,9 +67,8 @@ void ConfigFile::apply() writeTrustedList(trustedList); } } - if (!trusted) { - warn("ignoring untrusted flake configuration setting '%s'", name); + warn("ignoring untrusted flake configuration setting '%s'.\nPass '%s' to trust it", name, "--accept-flake-config"); continue; } } diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index ad0881641..119c556ac 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -6,6 +6,7 @@ #include "store-api.hh" #include "fetchers.hh" #include "finally.hh" +#include "fetch-settings.hh" namespace nix { @@ -71,7 +72,7 @@ static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree( return {std::move(tree), resolvedRef, lockedRef}; } -static void forceTrivialValue(EvalState & state, Value & value, const Pos & pos) +static void forceTrivialValue(EvalState & state, Value & value, const PosIdx pos) { if (value.isThunk() && value.isTrivial()) state.forceValue(value, pos); @@ -79,20 +80,20 @@ static void forceTrivialValue(EvalState & state, Value & value, const Pos & pos) static void expectType(EvalState & state, ValueType type, - Value & value, const Pos & pos) + Value & value, const PosIdx pos) { forceTrivialValue(state, value, pos); if (value.type() != type) throw Error("expected %s but got %s at %s", - showType(type), showType(value.type()), pos); + showType(type), showType(value.type()), state.positions[pos]); } static std::map<FlakeId, FlakeInput> parseFlakeInputs( - EvalState & state, Value * value, const Pos & pos, + EvalState & state, Value * value, const PosIdx pos, const std::optional<Path> & baseDir, InputPath lockRootPath); static FlakeInput parseFlakeInput(EvalState & state, - const std::string & inputName, Value * value, const Pos & pos, + const std::string & inputName, Value * value, const PosIdx pos, const std::optional<Path> & baseDir, InputPath lockRootPath) { expectType(state, nAttrs, *value, pos); @@ -110,37 +111,39 @@ static FlakeInput parseFlakeInput(EvalState & state, for (nix::Attr attr : *(value->attrs)) { try { if (attr.name == sUrl) { - expectType(state, nString, *attr.value, *attr.pos); + expectType(state, nString, *attr.value, attr.pos); url = attr.value->string.s; attrs.emplace("url", *url); } else if (attr.name == sFlake) { - expectType(state, nBool, *attr.value, *attr.pos); + expectType(state, nBool, *attr.value, attr.pos); input.isFlake = attr.value->boolean; } else if (attr.name == sInputs) { - input.overrides = parseFlakeInputs(state, attr.value, *attr.pos, baseDir, lockRootPath); + input.overrides = parseFlakeInputs(state, attr.value, attr.pos, baseDir, lockRootPath); } else if (attr.name == sFollows) { - expectType(state, nString, *attr.value, *attr.pos); + expectType(state, nString, *attr.value, attr.pos); auto follows(parseInputPath(attr.value->string.s)); follows.insert(follows.begin(), lockRootPath.begin(), lockRootPath.end()); input.follows = follows; } else { switch (attr.value->type()) { case nString: - attrs.emplace(attr.name, attr.value->string.s); + attrs.emplace(state.symbols[attr.name], attr.value->string.s); break; case nBool: - attrs.emplace(attr.name, Explicit<bool> { attr.value->boolean }); + attrs.emplace(state.symbols[attr.name], Explicit<bool> { attr.value->boolean }); break; case nInt: - attrs.emplace(attr.name, (long unsigned int)attr.value->integer); + 
attrs.emplace(state.symbols[attr.name], (long unsigned int)attr.value->integer); break; default: throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected", - attr.name, showType(*attr.value)); + state.symbols[attr.name], showType(*attr.value)); } } } catch (Error & e) { - e.addTrace(*attr.pos, hintfmt("in flake attribute '%s'", attr.name)); + e.addTrace( + state.positions[attr.pos], + hintfmt("in flake attribute '%s'", state.symbols[attr.name])); throw; } } @@ -149,13 +152,13 @@ static FlakeInput parseFlakeInput(EvalState & state, try { input.ref = FlakeRef::fromAttrs(attrs); } catch (Error & e) { - e.addTrace(pos, hintfmt("in flake input")); + e.addTrace(state.positions[pos], hintfmt("in flake input")); throw; } else { attrs.erase("url"); if (!attrs.empty()) - throw Error("unexpected flake input attribute '%s', at %s", attrs.begin()->first, pos); + throw Error("unexpected flake input attribute '%s', at %s", attrs.begin()->first, state.positions[pos]); if (url) input.ref = parseFlakeRef(*url, baseDir, true, input.isFlake); } @@ -167,7 +170,7 @@ static FlakeInput parseFlakeInput(EvalState & state, } static std::map<FlakeId, FlakeInput> parseFlakeInputs( - EvalState & state, Value * value, const Pos & pos, + EvalState & state, Value * value, const PosIdx pos, const std::optional<Path> & baseDir, InputPath lockRootPath) { std::map<FlakeId, FlakeInput> inputs; @@ -175,11 +178,11 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs( expectType(state, nAttrs, *value, pos); for (nix::Attr & inputAttr : *(*value).attrs) { - inputs.emplace(inputAttr.name, + inputs.emplace(state.symbols[inputAttr.name], parseFlakeInput(state, - inputAttr.name, + state.symbols[inputAttr.name], inputAttr.value, - *inputAttr.pos, + inputAttr.pos, baseDir, lockRootPath)); } @@ -217,28 +220,28 @@ static Flake getFlake( Value vInfo; state.evalFile(flakeFile, vInfo, true); // FIXME: symlink attack - expectType(state, nAttrs, vInfo, Pos(foFile, state.symbols.create(flakeFile), 0, 0)); + expectType(state, nAttrs, vInfo, state.positions.add({flakeFile, foFile}, 0, 0)); if (auto description = vInfo.attrs->get(state.sDescription)) { - expectType(state, nString, *description->value, *description->pos); + expectType(state, nString, *description->value, description->pos); flake.description = description->value->string.s; } auto sInputs = state.symbols.create("inputs"); if (auto inputs = vInfo.attrs->get(sInputs)) - flake.inputs = parseFlakeInputs(state, inputs->value, *inputs->pos, flakeDir, lockRootPath); + flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, flakeDir, lockRootPath); auto sOutputs = state.symbols.create("outputs"); if (auto outputs = vInfo.attrs->get(sOutputs)) { - expectType(state, nFunction, *outputs->value, *outputs->pos); + expectType(state, nFunction, *outputs->value, outputs->pos); if (outputs->value->isLambda() && outputs->value->lambda.fun->hasFormals()) { for (auto & formal : outputs->value->lambda.fun->formals->formals) { if (formal.name != state.sSelf) - flake.inputs.emplace(formal.name, FlakeInput { - .ref = parseFlakeRef(formal.name) + flake.inputs.emplace(state.symbols[formal.name], FlakeInput { + .ref = parseFlakeRef(state.symbols[formal.name]) }); } } @@ -249,35 +252,41 @@ static Flake getFlake( auto sNixConfig = state.symbols.create("nixConfig"); if (auto nixConfig = vInfo.attrs->get(sNixConfig)) { - expectType(state, nAttrs, *nixConfig->value, *nixConfig->pos); + expectType(state, nAttrs, *nixConfig->value, nixConfig->pos); for (auto & 
setting : *nixConfig->value->attrs) { - forceTrivialValue(state, *setting.value, *setting.pos); + forceTrivialValue(state, *setting.value, setting.pos); if (setting.value->type() == nString) - flake.config.settings.insert({setting.name, std::string(state.forceStringNoCtx(*setting.value, *setting.pos))}); + flake.config.settings.emplace( + state.symbols[setting.name], + std::string(state.forceStringNoCtx(*setting.value, setting.pos))); else if (setting.value->type() == nPath) { PathSet emptyContext = {}; flake.config.settings.emplace( - setting.name, - state.coerceToString(*setting.pos, *setting.value, emptyContext, false, true, true) .toOwned()); + state.symbols[setting.name], + state.coerceToString(setting.pos, *setting.value, emptyContext, false, true, true) .toOwned()); } else if (setting.value->type() == nInt) - flake.config.settings.insert({setting.name, state.forceInt(*setting.value, *setting.pos)}); + flake.config.settings.emplace( + state.symbols[setting.name], + state.forceInt(*setting.value, setting.pos)); else if (setting.value->type() == nBool) - flake.config.settings.insert({setting.name, Explicit<bool> { state.forceBool(*setting.value, *setting.pos) }}); + flake.config.settings.emplace( + state.symbols[setting.name], + Explicit<bool> { state.forceBool(*setting.value, setting.pos) }); else if (setting.value->type() == nList) { std::vector<std::string> ss; for (auto elem : setting.value->listItems()) { if (elem->type() != nString) throw TypeError("list element in flake configuration setting '%s' is %s while a string is expected", - setting.name, showType(*setting.value)); - ss.emplace_back(state.forceStringNoCtx(*elem, *setting.pos)); + state.symbols[setting.name], showType(*setting.value)); + ss.emplace_back(state.forceStringNoCtx(*elem, setting.pos)); } - flake.config.settings.insert({setting.name, ss}); + flake.config.settings.emplace(state.symbols[setting.name], ss); } else throw TypeError("flake configuration setting '%s' is %s", - setting.name, showType(*setting.value)); + state.symbols[setting.name], showType(*setting.value)); } } @@ -287,7 +296,7 @@ static Flake getFlake( attr.name != sOutputs && attr.name != sNixConfig) throw Error("flake '%s' has an unsupported attribute '%s', at %s", - lockedRef, attr.name, *attr.pos); + lockedRef, state.symbols[attr.name], state.positions[attr.pos]); } return flake; @@ -315,7 +324,7 @@ LockedFlake lockFlake( FlakeCache flakeCache; - auto useRegistries = lockFlags.useRegistries.value_or(settings.useRegistries); + auto useRegistries = lockFlags.useRegistries.value_or(fetchSettings.useRegistries); auto flake = getFlake(state, topRef, useRegistries, flakeCache); @@ -332,7 +341,6 @@ LockedFlake lockFlake( debug("old lock file: %s", oldLockFile); - // FIXME: check whether all overrides are used. std::map<InputPath, FlakeInput> overrides; std::set<InputPath> overridesUsed, updatesUsed; @@ -375,6 +383,18 @@ LockedFlake lockFlake( } } + /* Check whether this input has overrides for a + non-existent input. */ + for (auto [inputPath, inputOverride] : overrides) { + auto inputPath2(inputPath); + auto follow = inputPath2.back(); + inputPath2.pop_back(); + if (inputPath2 == inputPathPrefix && !flakeInputs.count(follow)) + warn( + "input '%s' has an override for a non-existent input '%s'", + printInputPath(inputPathPrefix), follow); + } + /* Go over the flake inputs, resolve/fetch them if necessary (i.e. if they're new or the flakeref changed from what's in the lock file). 
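Inputs whose lock is still valid are carried over from the old lock file; new or changed inputs are fetched and locked afresh, recursing into their own inputs when the input is itself a flake.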
*/ @@ -463,12 +483,12 @@ LockedFlake lockFlake( } else if (auto follows = std::get_if<1>(&i.second)) { if (! trustLock) { // It is possible that the flake has changed, - // so we must confirm all the follows that are in the lockfile are also in the flake. + // so we must confirm all the follows that are in the lock file are also in the flake. auto overridePath(inputPath); overridePath.push_back(i.first); auto o = overrides.find(overridePath); // If the override disappeared, we have to refetch the flake, - // since some of the inputs may not be present in the lockfile. + // since some of the inputs may not be present in the lock file. if (o == overrides.end()) { mustRefetch = true; // There's no point populating the rest of the fake inputs, @@ -504,6 +524,15 @@ LockedFlake lockFlake( if (!lockFlags.allowMutable && !input.ref->input.isLocked()) throw Error("cannot update flake input '%s' in pure mode", inputPathS); + /* Note: in case of an --override-input, we use + the *original* ref (input2.ref) for the + "original" field, rather than the + override. This ensures that the override isn't + nuked the next time we update the lock + file. That is, overrides are sticky unless you + use --no-write-lock-file. */ + auto ref = input2.ref ? *input2.ref : *input.ref; + if (input.isFlake) { Path localPath = parentPath; FlakeRef localRef = *input.ref; @@ -515,15 +544,7 @@ LockedFlake lockFlake( auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath); - /* Note: in case of an --override-input, we use - the *original* ref (input2.ref) for the - "original" field, rather than the - override. This ensures that the override isn't - nuked the next time we update the lock - file. That is, overrides are sticky unless you - use --no-write-lock-file. */ - auto childNode = std::make_shared<LockedNode>( - inputFlake.lockedRef, input2.ref ? *input2.ref : *input.ref); + auto childNode = std::make_shared<LockedNode>(inputFlake.lockedRef, ref); node->inputs.insert_or_assign(id, childNode); @@ -551,7 +572,7 @@ LockedFlake lockFlake( auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree( state, *input.ref, useRegistries, flakeCache); node->inputs.insert_or_assign(id, - std::make_shared<LockedNode>(lockedRef, *input.ref, false)); + std::make_shared<LockedNode>(lockedRef, ref, false)); } } @@ -591,7 +612,7 @@ LockedFlake lockFlake( if (lockFlags.writeLockFile) { if (auto sourcePath = topRef.input.getSourcePath()) { if (!newLockFile.isImmutable()) { - if (settings.warnDirty) + if (fetchSettings.warnDirty) warn("will not write lock file of flake '%s' because it has a mutable input", topRef); } else { if (!lockFlags.updateLockFile) @@ -618,7 +639,7 @@ LockedFlake lockFlake( if (lockFlags.commitLockFile) { std::string cm; - cm = settings.commitLockFileSummary.get(); + cm = fetchSettings.commitLockFileSummary.get(); if (cm == "") { cm = fmt("%s: %s", relPath, lockFileExists ? 
"Update" : "Add"); @@ -703,26 +724,48 @@ void callFlake(EvalState & state, state.callFunction(*vTmp2, *vRootSubdir, vRes, noPos); } -static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - state.requireExperimentalFeatureOnEvaluation(Xp::Flakes, "builtins.getFlake", pos); - std::string flakeRefS(state.forceStringNoCtx(*args[0], pos)); auto flakeRef = parseFlakeRef(flakeRefS, {}, true); if (evalSettings.pureEval && !flakeRef.input.isLocked()) - throw Error("cannot call 'getFlake' on unlocked flake reference '%s', at %s (use --impure to override)", flakeRefS, pos); + throw Error("cannot call 'getFlake' on unlocked flake reference '%s', at %s (use --impure to override)", flakeRefS, state.positions[pos]); callFlake(state, lockFlake(state, flakeRef, LockFlags { .updateLockFile = false, - .useRegistries = !evalSettings.pureEval && settings.useRegistries, + .writeLockFile = false, + .useRegistries = !evalSettings.pureEval && fetchSettings.useRegistries, .allowMutable = !evalSettings.pureEval, }), v); } -static RegisterPrimOp r2("__getFlake", 1, prim_getFlake); +static RegisterPrimOp r2({ + .name = "__getFlake", + .args = {"args"}, + .doc = R"( + Fetch a flake from a flake reference, and return its output attributes and some metadata. For example: + + ```nix + (builtins.getFlake "nix/55bc52401966fbffa525c574c14f67b00bc4fb3a").packages.x86_64-linux.nix + ``` + + Unless impure evaluation is allowed (`--impure`), the flake reference + must be "locked", e.g. contain a Git revision or content hash. An + example of an unlocked usage is: + + ```nix + (builtins.getFlake "github:edolstra/dwarffs").rev + ``` + + This function is only available if you enable the experimental feature + `flakes`. 
+ )", + .fun = prim_getFlake, + .experimentalFeature = Xp::Flakes, +}); } diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc index 930ed9ccd..eede493f8 100644 --- a/src/libexpr/flake/flakeref.cc +++ b/src/libexpr/flake/flakeref.cc @@ -98,7 +98,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment( if (std::regex_match(url, match, flakeRegex)) { auto parsedURL = ParsedURL{ .url = url, - .base = "flake:" + std::string(match[1]), + .base = "flake:" + match.str(1), .scheme = "flake", .authority = "", .path = match[1], @@ -106,12 +106,12 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment( return std::make_pair( FlakeRef(Input::fromURL(parsedURL), ""), - percentDecode(std::string(match[6]))); + percentDecode(match.str(6))); } else if (std::regex_match(url, match, pathUrlRegex)) { std::string path = match[1]; - std::string fragment = percentDecode(std::string(match[3])); + std::string fragment = percentDecode(match.str(3)); if (baseDir) { /* Check if 'url' is a path (either absolute or relative @@ -176,7 +176,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment( parsedURL.query.insert_or_assign("shallow", "1"); return std::make_pair( - FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")), + FlakeRef(Input::fromURL(parsedURL), getOr(parsedURL.query, "dir", "")), fragment); } @@ -189,7 +189,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment( if (!hasPrefix(path, "/")) throw BadURL("flake reference '%s' is not an absolute path", url); auto query = decodeQuery(match[2]); - path = canonPath(path + "/" + get(query, "dir").value_or("")); + path = canonPath(path + "/" + getOr(query, "dir", "")); } fetchers::Attrs attrs; @@ -208,7 +208,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment( input.parent = baseDir; return std::make_pair( - FlakeRef(std::move(input), get(parsedURL.query, "dir").value_or("")), + FlakeRef(std::move(input), getOr(parsedURL.query, "dir", "")), fragment); } } @@ -238,4 +238,15 @@ std::pair<fetchers::Tree, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const return {std::move(tree), FlakeRef(std::move(lockedInput), subdir)}; } +std::tuple<FlakeRef, std::string, OutputsSpec> parseFlakeRefWithFragmentAndOutputsSpec( + const std::string & url, + const std::optional<Path> & baseDir, + bool allowMissing, + bool isFlake) +{ + auto [prefix, outputsSpec] = parseOutputsSpec(url); + auto [flakeRef, fragment] = parseFlakeRefWithFragment(prefix, baseDir, allowMissing, isFlake); + return {std::move(flakeRef), fragment, outputsSpec}; +} + } diff --git a/src/libexpr/flake/flakeref.hh b/src/libexpr/flake/flakeref.hh index 1fddfd9a0..fe4f67193 100644 --- a/src/libexpr/flake/flakeref.hh +++ b/src/libexpr/flake/flakeref.hh @@ -3,6 +3,7 @@ #include "types.hh" #include "hash.hh" #include "fetchers.hh" +#include "path-with-outputs.hh" #include <variant> @@ -27,7 +28,7 @@ typedef std::string FlakeId; * object that fetcher generates (usually via * FlakeRef::fromAttrs(attrs) or parseFlakeRef(url) calls). * - * The actual fetch not have been performed yet (i.e. a FlakeRef may + * The actual fetch may not have been performed yet (i.e. a FlakeRef may * be lazy), but the fetcher can be invoked at any time via the * FlakeRef to ensure the store is populated with this input. 
*/ @@ -79,4 +80,11 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment( std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment( const std::string & url, const std::optional<Path> & baseDir = {}); +std::tuple<FlakeRef, std::string, OutputsSpec> parseFlakeRefWithFragmentAndOutputsSpec( + const std::string & url, + const std::optional<Path> & baseDir = {}, + bool allowMissing = false, + bool isFlake = true); + + } diff --git a/src/libexpr/flake/lockfile.cc b/src/libexpr/flake/lockfile.cc index 60b52d578..629d2e669 100644 --- a/src/libexpr/flake/lockfile.cc +++ b/src/libexpr/flake/lockfile.cc @@ -36,7 +36,7 @@ LockedNode::LockedNode(const nlohmann::json & json) , isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true) { if (!lockedRef.input.isLocked()) - throw Error("lockfile contains mutable lock '%s'", + throw Error("lock file contains mutable lock '%s'", fetchers::attrsToJSON(lockedRef.input.toAttrs())); } diff --git a/src/libexpr/function-trace.hh b/src/libexpr/function-trace.hh index 472f2045e..e9a2526bd 100644 --- a/src/libexpr/function-trace.hh +++ b/src/libexpr/function-trace.hh @@ -8,7 +8,7 @@ namespace nix { struct FunctionCallTrace { - const Pos & pos; + const Pos pos; FunctionCallTrace(const Pos & pos); ~FunctionCallTrace(); }; diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index 8393e4225..346741dd5 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -1,6 +1,7 @@ #include "get-drvs.hh" #include "util.hh" #include "eval-inline.hh" +#include "derivations.hh" #include "store-api.hh" #include "path-with-outputs.hh" @@ -22,7 +23,7 @@ DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPat { auto [drvPath, selectedOutputs] = parsePathWithOutputs(*store, drvPathWithOutputs); - this->drvPath = store->printStorePath(drvPath); + this->drvPath = drvPath; auto drv = store->derivationFromPath(drvPath); @@ -33,7 +34,7 @@ DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPat outputName = selectedOutputs.empty() - ? get(drv.env, "outputName").value_or("out") + ? getOr(drv.env, "outputName", "out") : *selectedOutputs.begin(); auto i = drv.outputs.find(outputName); @@ -41,9 +42,7 @@ DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPat throw Error("derivation '%s' does not have output '%s'", store->printStorePath(drvPath), outputName); auto & [outputName, output] = *i; - auto optStorePath = output.path(*store, drv.name, outputName); - if (optStorePath) - outPath = store->printStorePath(*optStorePath); + outPath = {output.path(*store, drv.name, outputName)}; } @@ -62,30 +61,41 @@ std::string DrvInfo::querySystem() const { if (system == "" && attrs) { auto i = attrs->find(state->sSystem); - system = i == attrs->end() ? "unknown" : state->forceStringNoCtx(*i->value, *i->pos); + system = i == attrs->end() ? "unknown" : state->forceStringNoCtx(*i->value, i->pos); } return system; } -std::string DrvInfo::queryDrvPath() const +std::optional<StorePath> DrvInfo::queryDrvPath() const { - if (drvPath == "" && attrs) { + if (!drvPath && attrs) { Bindings::iterator i = attrs->find(state->sDrvPath); PathSet context; - drvPath = i != attrs->end() ? 
state->coerceToPath(*i->pos, *i->value, context) : ""; + if (i == attrs->end()) + drvPath = {std::nullopt}; + else + drvPath = {state->coerceToStorePath(i->pos, *i->value, context)}; } - return drvPath; + return drvPath.value_or(std::nullopt); } -std::string DrvInfo::queryOutPath() const +StorePath DrvInfo::requireDrvPath() const +{ + if (auto drvPath = queryDrvPath()) + return *drvPath; + throw Error("derivation does not contain a 'drvPath' attribute"); +} + + +StorePath DrvInfo::queryOutPath() const { if (!outPath && attrs) { Bindings::iterator i = attrs->find(state->sOutPath); PathSet context; if (i != attrs->end()) - outPath = state->coerceToPath(*i->pos, *i->value, context); + outPath = state->coerceToStorePath(i->pos, *i->value, context); } if (!outPath) throw UnimplementedError("CA derivations are not yet supported"); @@ -93,48 +103,65 @@ std::string DrvInfo::queryOutPath() const } -DrvInfo::Outputs DrvInfo::queryOutputs(bool onlyOutputsToInstall) +DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall) { if (outputs.empty()) { /* Get the ‘outputs’ list. */ Bindings::iterator i; if (attrs && (i = attrs->find(state->sOutputs)) != attrs->end()) { - state->forceList(*i->value, *i->pos); + state->forceList(*i->value, i->pos); /* For each output... */ for (auto elem : i->value->listItems()) { - /* Evaluate the corresponding set. */ - std::string name(state->forceStringNoCtx(*elem, *i->pos)); - Bindings::iterator out = attrs->find(state->symbols.create(name)); - if (out == attrs->end()) continue; // FIXME: throw error? - state->forceAttrs(*out->value, *i->pos); - - /* And evaluate its ‘outPath’ attribute. */ - Bindings::iterator outPath = out->value->attrs->find(state->sOutPath); - if (outPath == out->value->attrs->end()) continue; // FIXME: throw error? - PathSet context; - outputs[name] = state->coerceToPath(*outPath->pos, *outPath->value, context); + std::string output(state->forceStringNoCtx(*elem, i->pos)); + + if (withPaths) { + /* Evaluate the corresponding set. */ + Bindings::iterator out = attrs->find(state->symbols.create(output)); + if (out == attrs->end()) continue; // FIXME: throw error? + state->forceAttrs(*out->value, i->pos); + + /* And evaluate its ‘outPath’ attribute. */ + Bindings::iterator outPath = out->value->attrs->find(state->sOutPath); + if (outPath == out->value->attrs->end()) continue; // FIXME: throw error? + PathSet context; + outputs.emplace(output, state->coerceToStorePath(outPath->pos, *outPath->value, context)); + } else + outputs.emplace(output, std::nullopt); } } else - outputs["out"] = queryOutPath(); + outputs.emplace("out", withPaths ? std::optional{queryOutPath()} : std::nullopt); } + if (!onlyOutputsToInstall || !attrs) return outputs; - /* Check for `meta.outputsToInstall` and return `outputs` reduced to that. 
*/ - const Value * outTI = queryMeta("outputsToInstall"); - if (!outTI) return outputs; - const auto errMsg = Error("this derivation has bad 'meta.outputsToInstall'"); - /* ^ this shows during `nix-env -i` right under the bad derivation */ - if (!outTI->isList()) throw errMsg; - Outputs result; - for (auto elem : outTI->listItems()) { - if (elem->type() != nString) throw errMsg; - auto out = outputs.find(elem->string.s); - if (out == outputs.end()) throw errMsg; + Bindings::iterator i; + if (attrs && (i = attrs->find(state->sOutputSpecified)) != attrs->end() && state->forceBool(*i->value, i->pos)) { + Outputs result; + auto out = outputs.find(queryOutputName()); + if (out == outputs.end()) + throw Error("derivation does not have output '%s'", queryOutputName()); result.insert(*out); + return result; + } + + else { + /* Check for `meta.outputsToInstall` and return `outputs` reduced to that. */ + const Value * outTI = queryMeta("outputsToInstall"); + if (!outTI) return outputs; + const auto errMsg = Error("this derivation has bad 'meta.outputsToInstall'"); + /* ^ this shows during `nix-env -i` right under the bad derivation */ + if (!outTI->isList()) throw errMsg; + Outputs result; + for (auto elem : outTI->listItems()) { + if (elem->type() != nString) throw errMsg; + auto out = outputs.find(elem->string.s); + if (out == outputs.end()) throw errMsg; + result.insert(*out); + } + return result; } - return result; } @@ -154,7 +181,7 @@ Bindings * DrvInfo::getMeta() if (!attrs) return 0; Bindings::iterator a = attrs->find(state->sMeta); if (a == attrs->end()) return 0; - state->forceAttrs(*a->value, *a->pos); + state->forceAttrs(*a->value, a->pos); meta = a->value->attrs; return meta; } @@ -165,7 +192,7 @@ StringSet DrvInfo::queryMetaNames() StringSet res; if (!getMeta()) return res; for (auto & i : *meta) - res.insert(i.name); + res.emplace(state->symbols[i.name]); return res; } @@ -255,7 +282,7 @@ void DrvInfo::setMeta(const std::string & name, Value * v) { getMeta(); auto attrs = state->buildBindings(1 + (meta ? meta->size() : 0)); - Symbol sym = state->symbols.create(name); + auto sym = state->symbols.create(name); if (meta) for (auto i : *meta) if (i.name != sym) @@ -342,11 +369,11 @@ static void getDerivations(EvalState & state, Value & vIn, there are names clashes between derivations, the derivation bound to the attribute with the "lower" name should take precedence). */ - for (auto & i : v.attrs->lexicographicOrder()) { - debug("evaluating attribute '%1%'", i->name); - if (!std::regex_match(std::string(i->name), attrRegex)) + for (auto & i : v.attrs->lexicographicOrder(state.symbols)) { + debug("evaluating attribute '%1%'", state.symbols[i->name]); + if (!std::regex_match(std::string(state.symbols[i->name]), attrRegex)) continue; - std::string pathPrefix2 = addToPath(pathPrefix, i->name); + std::string pathPrefix2 = addToPath(pathPrefix, state.symbols[i->name]); if (combineChannels) getDerivations(state, *i->value, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures); else if (getDerivation(state, *i->value, pathPrefix2, drvs, done, ignoreAssertionFailures)) { @@ -355,7 +382,7 @@ static void getDerivations(EvalState & state, Value & vIn, `recurseForDerivations = true' attribute. 
*/ if (i->value->type() == nAttrs) { Bindings::iterator j = i->value->attrs->find(state.sRecurseForDerivations); - if (j != i->value->attrs->end() && state.forceBool(*j->value, *j->pos)) + if (j != i->value->attrs->end() && state.forceBool(*j->value, j->pos)) getDerivations(state, *i->value, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures); } } diff --git a/src/libexpr/get-drvs.hh b/src/libexpr/get-drvs.hh index d13847785..bbd2d3c47 100644 --- a/src/libexpr/get-drvs.hh +++ b/src/libexpr/get-drvs.hh @@ -1,6 +1,7 @@ #pragma once #include "eval.hh" +#include "path.hh" #include <string> #include <map> @@ -12,15 +13,15 @@ namespace nix { struct DrvInfo { public: - typedef std::map<std::string, Path> Outputs; + typedef std::map<std::string, std::optional<StorePath>> Outputs; private: EvalState * state; mutable std::string name; mutable std::string system; - mutable std::string drvPath; - mutable std::optional<std::string> outPath; + mutable std::optional<std::optional<StorePath>> drvPath; + mutable std::optional<StorePath> outPath; mutable std::string outputName; Outputs outputs; @@ -41,11 +42,13 @@ public: std::string queryName() const; std::string querySystem() const; - std::string queryDrvPath() const; - std::string queryOutPath() const; + std::optional<StorePath> queryDrvPath() const; + StorePath requireDrvPath() const; + StorePath queryOutPath() const; std::string queryOutputName() const; - /** Return the list of outputs. The "outputs to install" are determined by `meta.outputsToInstall`. */ - Outputs queryOutputs(bool onlyOutputsToInstall = false); + /** Return the unordered map of output names to (optional) output paths. + * The "outputs to install" are determined by `meta.outputsToInstall`. */ + Outputs queryOutputs(bool withPaths = true, bool onlyOutputsToInstall = false); StringSet queryMetaNames(); Value * queryMeta(const std::string & name); @@ -61,8 +64,8 @@ public: */ void setName(const std::string & s) { name = s; } - void setDrvPath(const std::string & s) { drvPath = s; } - void setOutPath(const std::string & s) { outPath = s; } + void setDrvPath(StorePath path) { drvPath = {{std::move(path)}}; } + void setOutPath(StorePath path) { outPath = {{std::move(path)}}; } void setFailed() { failed = true; }; bool hasFailed() { return failed; }; @@ -70,7 +73,7 @@ public: #if HAVE_BOEHMGC -typedef std::list<DrvInfo, traceable_allocator<DrvInfo> > DrvInfos; +typedef std::list<DrvInfo, traceable_allocator<DrvInfo>> DrvInfos; #else typedef std::list<DrvInfo> DrvInfos; #endif diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index e276b0467..462b3b602 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -28,6 +28,13 @@ using namespace nix; namespace nix { +static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data) +{ + return data->state.positions.add(data->origin, loc.first_line, loc.first_column); +} + +#define CUR_POS makeCurPos(*yylloc, data) + // backup to recover from yyless(0) YYLTYPE prev_yylloc; @@ -37,7 +44,6 @@ static void initLoc(YYLTYPE * loc) loc->first_column = loc->last_column = 1; } - static void adjustLoc(YYLTYPE * loc, const char * s, size_t len) { prev_yylloc = *loc; @@ -147,14 +153,20 @@ or { return OR_KW; } try { yylval->n = boost::lexical_cast<int64_t>(yytext); } catch (const boost::bad_lexical_cast &) { - throw ParseError("invalid integer '%1%'", yytext); + throw ParseError({ + .msg = hintfmt("invalid integer '%1%'", yytext), + .errPos = data->state.positions[CUR_POS], + }); } return INT; } {FLOAT} { errno = 0; yylval->nf = 
strtod(yytext, 0); if (errno != 0) - throw ParseError("invalid float '%1%'", yytext); + throw ParseError({ + .msg = hintfmt("invalid float '%1%'", yytext), + .errPos = data->state.positions[CUR_POS], + }); return FLOAT; } @@ -186,7 +198,7 @@ or { return OR_KW; } (...|\$[^\{\"\\]|\\.|\$\\.)+ would have triggered. This is technically invalid, but we leave the problem to the parser who fails with exact location. */ - return STR; + return EOF; } \'\'(\ *\n)? { PUSH_STATE(IND_STRING); return IND_STRING_OPEN; } @@ -280,7 +292,10 @@ or { return OR_KW; } <INPATH_SLASH>{ANY} | <INPATH_SLASH><<EOF>> { - throw ParseError("path has a trailing slash"); + throw ParseError({ + .msg = hintfmt("path has a trailing slash"), + .errPos = data->state.positions[CUR_POS], + }); } {SPATH} { yylval->path = {yytext, (size_t) yyleng}; return SPATH; } diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index a2def65a6..7c623a07d 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -1,21 +1,15 @@ #include "nixexpr.hh" #include "derivations.hh" +#include "eval.hh" +#include "symbol-table.hh" #include "util.hh" #include <cstdlib> - namespace nix { - /* Displaying abstract syntax trees. */ -std::ostream & operator << (std::ostream & str, const Expr & e) -{ - e.show(str); - return str; -} - static void showString(std::ostream & str, std::string_view s) { str << '"'; @@ -28,8 +22,10 @@ static void showString(std::ostream & str, std::string_view s) str << '"'; } -static void showId(std::ostream & str, std::string_view s) +std::ostream & operator <<(std::ostream & str, const SymbolStr & symbol) { + std::string_view s = symbol; + if (s.empty()) str << "\"\""; else if (s == "if") // FIXME: handle other keywords @@ -38,7 +34,7 @@ static void showId(std::ostream & str, std::string_view s) char c = s[0]; if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_')) { showString(str, s); - return; + return str; } for (auto c : s) if (!((c >= 'a' && c <= 'z') || @@ -46,89 +42,104 @@ static void showId(std::ostream & str, std::string_view s) (c >= '0' && c <= '9') || c == '_' || c == '\'' || c == '-')) { showString(str, s); - return; + return str; } str << s; } -} - -std::ostream & operator << (std::ostream & str, const Symbol & sym) -{ - showId(str, *sym.s); return str; } -void Expr::show(std::ostream & str) const +void Expr::show(const SymbolTable & symbols, std::ostream & str) const { abort(); } -void ExprInt::show(std::ostream & str) const +void ExprInt::show(const SymbolTable & symbols, std::ostream & str) const { str << n; } -void ExprFloat::show(std::ostream & str) const +void ExprFloat::show(const SymbolTable & symbols, std::ostream & str) const { str << nf; } -void ExprString::show(std::ostream & str) const +void ExprString::show(const SymbolTable & symbols, std::ostream & str) const { showString(str, s); } -void ExprPath::show(std::ostream & str) const +void ExprPath::show(const SymbolTable & symbols, std::ostream & str) const { str << s; } -void ExprVar::show(std::ostream & str) const +void ExprVar::show(const SymbolTable & symbols, std::ostream & str) const { - str << name; + str << symbols[name]; } -void ExprSelect::show(std::ostream & str) const +void ExprSelect::show(const SymbolTable & symbols, std::ostream & str) const { - str << "(" << *e << ")." << showAttrPath(attrPath); - if (def) str << " or (" << *def << ")"; + str << "("; + e->show(symbols, str); + str << ")." 
<< showAttrPath(symbols, attrPath); + if (def) { + str << " or ("; + def->show(symbols, str); + str << ")"; + } } -void ExprOpHasAttr::show(std::ostream & str) const +void ExprOpHasAttr::show(const SymbolTable & symbols, std::ostream & str) const { - str << "((" << *e << ") ? " << showAttrPath(attrPath) << ")"; + str << "(("; + e->show(symbols, str); + str << ") ? " << showAttrPath(symbols, attrPath) << ")"; } -void ExprAttrs::show(std::ostream & str) const +void ExprAttrs::show(const SymbolTable & symbols, std::ostream & str) const { if (recursive) str << "rec "; str << "{ "; typedef const decltype(attrs)::value_type * Attr; std::vector<Attr> sorted; for (auto & i : attrs) sorted.push_back(&i); - std::sort(sorted.begin(), sorted.end(), [](Attr a, Attr b) { - return (const std::string &) a->first < (const std::string &) b->first; - }); + std::sort(sorted.begin(), sorted.end(), [&](Attr a, Attr b) { + std::string_view sa = symbols[a->first], sb = symbols[b->first]; + return sa < sb; + }); for (auto & i : sorted) { if (i->second.inherited) - str << "inherit " << i->first << " " << "; "; - else - str << i->first << " = " << *i->second.e << "; "; + str << "inherit " << symbols[i->first] << " " << "; "; + else { + str << symbols[i->first] << " = "; + i->second.e->show(symbols, str); + str << "; "; + } + } + for (auto & i : dynamicAttrs) { + str << "\"${"; + i.nameExpr->show(symbols, str); + str << "}\" = "; + i.valueExpr->show(symbols, str); + str << "; "; } - for (auto & i : dynamicAttrs) - str << "\"${" << *i.nameExpr << "}\" = " << *i.valueExpr << "; "; str << "}"; } -void ExprList::show(std::ostream & str) const +void ExprList::show(const SymbolTable & symbols, std::ostream & str) const { str << "[ "; - for (auto & i : elems) - str << "(" << *i << ") "; + for (auto & i : elems) { + str << "("; + i->show(symbols, str); + str << ") "; + } str << "]"; } -void ExprLambda::show(std::ostream & str) const +void ExprLambda::show(const SymbolTable & symbols, std::ostream & str) const { str << "("; if (hasFormals()) { @@ -136,74 +147,100 @@ void ExprLambda::show(std::ostream & str) const bool first = true; for (auto & i : formals->formals) { if (first) first = false; else str << ", "; - str << i.name; - if (i.def) str << " ? " << *i.def; + str << symbols[i.name]; + if (i.def) { + str << " ? 
"; + i.def->show(symbols, str); + } } if (formals->ellipsis) { if (!first) str << ", "; str << "..."; } str << " }"; - if (!arg.empty()) str << " @ "; + if (arg) str << " @ "; } - if (!arg.empty()) str << arg; - str << ": " << *body << ")"; + if (arg) str << symbols[arg]; + str << ": "; + body->show(symbols, str); + str << ")"; } -void ExprCall::show(std::ostream & str) const +void ExprCall::show(const SymbolTable & symbols, std::ostream & str) const { - str << '(' << *fun; + str << '('; + fun->show(symbols, str); for (auto e : args) { str << ' '; - str << *e; + e->show(symbols, str); } str << ')'; } -void ExprLet::show(std::ostream & str) const +void ExprLet::show(const SymbolTable & symbols, std::ostream & str) const { str << "(let "; for (auto & i : attrs->attrs) if (i.second.inherited) { - str << "inherit " << i.first << "; "; + str << "inherit " << symbols[i.first] << "; "; } - else - str << i.first << " = " << *i.second.e << "; "; - str << "in " << *body << ")"; + else { + str << symbols[i.first] << " = "; + i.second.e->show(symbols, str); + str << "; "; + } + str << "in "; + body->show(symbols, str); + str << ")"; } -void ExprWith::show(std::ostream & str) const +void ExprWith::show(const SymbolTable & symbols, std::ostream & str) const { - str << "(with " << *attrs << "; " << *body << ")"; + str << "(with "; + attrs->show(symbols, str); + str << "; "; + body->show(symbols, str); + str << ")"; } -void ExprIf::show(std::ostream & str) const +void ExprIf::show(const SymbolTable & symbols, std::ostream & str) const { - str << "(if " << *cond << " then " << *then << " else " << *else_ << ")"; + str << "(if "; + cond->show(symbols, str); + str << " then "; + then->show(symbols, str); + str << " else "; + else_->show(symbols, str); + str << ")"; } -void ExprAssert::show(std::ostream & str) const +void ExprAssert::show(const SymbolTable & symbols, std::ostream & str) const { - str << "assert " << *cond << "; " << *body; + str << "assert "; + cond->show(symbols, str); + str << "; "; + body->show(symbols, str); } -void ExprOpNot::show(std::ostream & str) const +void ExprOpNot::show(const SymbolTable & symbols, std::ostream & str) const { - str << "(! " << *e << ")"; + str << "(! "; + e->show(symbols, str); + str << ")"; } -void ExprConcatStrings::show(std::ostream & str) const +void ExprConcatStrings::show(const SymbolTable & symbols, std::ostream & str) const { bool first = true; str << "("; for (auto & i : *es) { if (first) first = false; else str << " + "; - str << *i.second; + i.second->show(symbols, str); } str << ")"; } -void ExprPos::show(std::ostream & str) const +void ExprPos::show(const SymbolTable & symbols, std::ostream & str) const { str << "__curPos"; } @@ -234,55 +271,67 @@ std::ostream & operator << (std::ostream & str, const Pos & pos) } -std::string showAttrPath(const AttrPath & attrPath) +std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath) { std::ostringstream out; bool first = true; for (auto & i : attrPath) { if (!first) out << '.'; else first = false; - if (i.symbol.set()) - out << i.symbol; - else - out << "\"${" << *i.expr << "}\""; + if (i.symbol) + out << symbols[i.symbol]; + else { + out << "\"${"; + i.expr->show(symbols, out); + out << "}\""; + } } return out.str(); } -Pos noPos; - /* Computing levels/displacements for variables. 
*/ -void Expr::bindVars(const StaticEnv & env) +void Expr::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { abort(); } -void ExprInt::bindVars(const StaticEnv & env) +void ExprInt::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); } -void ExprFloat::bindVars(const StaticEnv & env) +void ExprFloat::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); } -void ExprString::bindVars(const StaticEnv & env) +void ExprString::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); } -void ExprPath::bindVars(const StaticEnv & env) +void ExprPath::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); } -void ExprVar::bindVars(const StaticEnv & env) +void ExprVar::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); + /* Check whether the variable appears in the environment. If so, set its level and displacement. */ const StaticEnv * curEnv; Level level; int withLevel = -1; - for (curEnv = &env, level = 0; curEnv; curEnv = curEnv->up, level++) { + for (curEnv = env.get(), level = 0; curEnv; curEnv = curEnv->up, level++) { if (curEnv->isWith) { if (withLevel == -1) withLevel = level; } else { @@ -301,176 +350,222 @@ void ExprVar::bindVars(const StaticEnv & env) "undefined variable" error now. */ if (withLevel == -1) throw UndefinedVarError({ - .msg = hintfmt("undefined variable '%1%'", name), - .errPos = pos + .msg = hintfmt("undefined variable '%1%'", es.symbols[name]), + .errPos = es.positions[pos] }); fromWith = true; this->level = withLevel; } -void ExprSelect::bindVars(const StaticEnv & env) +void ExprSelect::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { - e->bindVars(env); - if (def) def->bindVars(env); + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); + + e->bindVars(es, env); + if (def) def->bindVars(es, env); for (auto & i : attrPath) - if (!i.symbol.set()) - i.expr->bindVars(env); + if (!i.symbol) + i.expr->bindVars(es, env); } -void ExprOpHasAttr::bindVars(const StaticEnv & env) +void ExprOpHasAttr::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { - e->bindVars(env); + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); + + e->bindVars(es, env); for (auto & i : attrPath) - if (!i.symbol.set()) - i.expr->bindVars(env); + if (!i.symbol) + i.expr->bindVars(es, env); } -void ExprAttrs::bindVars(const StaticEnv & env) +void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { - const StaticEnv * dynamicEnv = &env; - StaticEnv newEnv(false, &env, recursive ? attrs.size() : 0); + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); if (recursive) { - dynamicEnv = &newEnv; + auto newEnv = std::make_shared<StaticEnv>(false, env.get(), recursive ? attrs.size() : 0); Displacement displ = 0; for (auto & i : attrs) - newEnv.vars.emplace_back(i.first, i.second.displ = displ++); + newEnv->vars.emplace_back(i.first, i.second.displ = displ++); // No need to sort newEnv since attrs is in sorted order. for (auto & i : attrs) - i.second.e->bindVars(i.second.inherited ? 
env : newEnv); - } + i.second.e->bindVars(es, i.second.inherited ? env : newEnv); - else + for (auto & i : dynamicAttrs) { + i.nameExpr->bindVars(es, newEnv); + i.valueExpr->bindVars(es, newEnv); + } + } + else { for (auto & i : attrs) - i.second.e->bindVars(env); + i.second.e->bindVars(es, env); - for (auto & i : dynamicAttrs) { - i.nameExpr->bindVars(*dynamicEnv); - i.valueExpr->bindVars(*dynamicEnv); + for (auto & i : dynamicAttrs) { + i.nameExpr->bindVars(es, env); + i.valueExpr->bindVars(es, env); + } } } -void ExprList::bindVars(const StaticEnv & env) +void ExprList::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); + for (auto & i : elems) - i->bindVars(env); + i->bindVars(es, env); } -void ExprLambda::bindVars(const StaticEnv & env) +void ExprLambda::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { - StaticEnv newEnv( - false, &env, + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); + + auto newEnv = std::make_shared<StaticEnv>( + false, env.get(), (hasFormals() ? formals->formals.size() : 0) + - (arg.empty() ? 0 : 1)); + (!arg ? 0 : 1)); Displacement displ = 0; - if (!arg.empty()) newEnv.vars.emplace_back(arg, displ++); + if (arg) newEnv->vars.emplace_back(arg, displ++); if (hasFormals()) { for (auto & i : formals->formals) - newEnv.vars.emplace_back(i.name, displ++); + newEnv->vars.emplace_back(i.name, displ++); - newEnv.sort(); + newEnv->sort(); for (auto & i : formals->formals) - if (i.def) i.def->bindVars(newEnv); + if (i.def) i.def->bindVars(es, newEnv); } - body->bindVars(newEnv); + body->bindVars(es, newEnv); } -void ExprCall::bindVars(const StaticEnv & env) +void ExprCall::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { - fun->bindVars(env); + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); + + fun->bindVars(es, env); for (auto e : args) - e->bindVars(env); + e->bindVars(es, env); } -void ExprLet::bindVars(const StaticEnv & env) +void ExprLet::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { - StaticEnv newEnv(false, &env, attrs->attrs.size()); + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); + + auto newEnv = std::make_shared<StaticEnv>(false, env.get(), attrs->attrs.size()); Displacement displ = 0; for (auto & i : attrs->attrs) - newEnv.vars.emplace_back(i.first, i.second.displ = displ++); + newEnv->vars.emplace_back(i.first, i.second.displ = displ++); // No need to sort newEnv since attrs->attrs is in sorted order. for (auto & i : attrs->attrs) - i.second.e->bindVars(i.second.inherited ? env : newEnv); + i.second.e->bindVars(es, i.second.inherited ? env : newEnv); - body->bindVars(newEnv); + body->bindVars(es, newEnv); } -void ExprWith::bindVars(const StaticEnv & env) +void ExprWith::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); + /* Does this `with' have an enclosing `with'? If so, record its level so that `lookupVar' can look up variables in the previous `with' if this one doesn't contain the desired attribute. 
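A prevWith of 0 means that there is no enclosing `with'.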
*/ const StaticEnv * curEnv; Level level; prevWith = 0; - for (curEnv = &env, level = 1; curEnv; curEnv = curEnv->up, level++) + for (curEnv = env.get(), level = 1; curEnv; curEnv = curEnv->up, level++) if (curEnv->isWith) { prevWith = level; break; } - attrs->bindVars(env); - StaticEnv newEnv(true, &env); - body->bindVars(newEnv); + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); + + attrs->bindVars(es, env); + auto newEnv = std::make_shared<StaticEnv>(true, env.get()); + body->bindVars(es, newEnv); } -void ExprIf::bindVars(const StaticEnv & env) +void ExprIf::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { - cond->bindVars(env); - then->bindVars(env); - else_->bindVars(env); + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); + + cond->bindVars(es, env); + then->bindVars(es, env); + else_->bindVars(es, env); } -void ExprAssert::bindVars(const StaticEnv & env) +void ExprAssert::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { - cond->bindVars(env); - body->bindVars(env); + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); + + cond->bindVars(es, env); + body->bindVars(es, env); } -void ExprOpNot::bindVars(const StaticEnv & env) +void ExprOpNot::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { - e->bindVars(env); + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); + + e->bindVars(es, env); } -void ExprConcatStrings::bindVars(const StaticEnv & env) +void ExprConcatStrings::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { - for (auto & i : *es) - i.second->bindVars(env); + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); + + for (auto & i : *this->es) + i.second->bindVars(es, env); } -void ExprPos::bindVars(const StaticEnv & env) +void ExprPos::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) { + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, env)); } /* Storing function names. */ -void Expr::setName(Symbol & name) +void Expr::setName(Symbol name) { } -void ExprLambda::setName(Symbol & name) +void ExprLambda::setName(Symbol name) { this->name = name; body->setName(name); } -std::string ExprLambda::showNamePos() const +std::string ExprLambda::showNamePos(const EvalState & state) const { - return fmt("%1% at %2%", name.set() ? "'" + (std::string) name + "'" : "anonymous function", pos); + std::string id(name + ? concatStrings("'", state.symbols[name], "'") + : "anonymous function"); + return fmt("%1% at %2%", id, state.positions[pos]); } @@ -480,8 +575,7 @@ std::string ExprLambda::showNamePos() const size_t SymbolTable::totalSize() const { size_t n = 0; - for (auto & i : store) - n += i.size(); + dump([&] (const std::string & s) { n += s.size(); }); return n; } diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 12b54b8eb..5eb022770 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -1,8 +1,12 @@ #pragma once +#include <map> +#include <vector> + #include "value.hh" #include "symbol-table.hh" #include "error.hh" +#include "chunked-vector.hh" namespace nix { @@ -18,38 +22,96 @@ MakeError(UndefinedVarError, Error); MakeError(MissingArgumentError, EvalError); MakeError(RestrictedPathError, Error); - /* Position objects. 
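A Pos is the expanded form of a compact PosIdx: it carries the file, origin, line and column, and is produced by indexing a PosTable (defined below) with a PosIdx.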
*/ struct Pos { + std::string file; FileOrigin origin; - Symbol file; - unsigned int line, column; + uint32_t line; + uint32_t column; + + explicit operator bool() const { return line > 0; } +}; + +class PosIdx { + friend class PosTable; + +private: + uint32_t id; + + explicit PosIdx(uint32_t id): id(id) {} - Pos() : origin(foString), line(0), column(0) { } - Pos(FileOrigin origin, const Symbol & file, unsigned int line, unsigned int column) - : origin(origin), file(file), line(line), column(column) { } +public: + PosIdx() : id(0) {} - operator bool() const + explicit operator bool() const { return id > 0; } + + bool operator<(const PosIdx other) const { return id < other.id; } +}; + +class PosTable +{ +public: + class Origin { + friend PosTable; + private: + // must always be invalid by default, add() replaces this with the actual value. + // subsequent add() calls use this index as a token to quickly check whether the + // current origins.back() can be reused or not. + mutable uint32_t idx = std::numeric_limits<uint32_t>::max(); + + explicit Origin(uint32_t idx): idx(idx), file{}, origin{} {} + + public: + const std::string file; + const FileOrigin origin; + + Origin(std::string file, FileOrigin origin): file(std::move(file)), origin(origin) {} + }; + + struct Offset { + uint32_t line, column; + }; + +private: + std::vector<Origin> origins; + ChunkedVector<Offset, 8192> offsets; + +public: + PosTable(): offsets(1024) { - return line != 0; + origins.reserve(1024); } - bool operator < (const Pos & p2) const + PosIdx add(const Origin & origin, uint32_t line, uint32_t column) { - if (!line) return p2.line; - if (!p2.line) return false; - int d = ((const std::string &) file).compare((const std::string &) p2.file); - if (d < 0) return true; - if (d > 0) return false; - if (line < p2.line) return true; - if (line > p2.line) return false; - return column < p2.column; + const auto idx = offsets.add({line, column}).second; + if (origins.empty() || origins.back().idx != origin.idx) { + origin.idx = idx; + origins.push_back(origin); + } + return PosIdx(idx + 1); + } + + Pos operator[](PosIdx p) const + { + if (p.id == 0 || p.id > offsets.size()) + return {}; + const auto idx = p.id - 1; + /* we want the last key <= idx, so we'll take prev(first key > idx). + this is guaranteed to never rewind origin.begin because the first + key is always 0. */ + const auto pastOrigin = std::upper_bound( + origins.begin(), origins.end(), Origin(idx), + [] (const auto & a, const auto & b) { return a.idx < b.idx; }); + const auto origin = *std::prev(pastOrigin); + const auto offset = offsets[idx]; + return {origin.file, origin.origin, offset.line, offset.column}; } }; -extern Pos noPos; +inline PosIdx noPos = {}; std::ostream & operator << (std::ostream & str, const Pos & pos); @@ -65,13 +127,13 @@ struct AttrName { Symbol symbol; Expr * expr; - AttrName(const Symbol & s) : symbol(s) {}; + AttrName(Symbol s) : symbol(s) {}; AttrName(Expr * e) : expr(e) {}; }; typedef std::vector<AttrName> AttrPath; -std::string showAttrPath(const AttrPath & attrPath); +std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath); /* Abstract syntax of Nix expressions. 
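The Expr subclasses below each provide show(), eval() and bindVars(), usually declared via the COMMON_METHODS macro.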
*/ @@ -79,27 +141,26 @@ std::string showAttrPath(const AttrPath & attrPath); struct Expr { virtual ~Expr() { }; - virtual void show(std::ostream & str) const; - virtual void bindVars(const StaticEnv & env); + virtual void show(const SymbolTable & symbols, std::ostream & str) const; + virtual void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env); virtual void eval(EvalState & state, Env & env, Value & v); virtual Value * maybeThunk(EvalState & state, Env & env); - virtual void setName(Symbol & name); + virtual void setName(Symbol name); + virtual PosIdx getPos() const { return noPos; } }; -std::ostream & operator << (std::ostream & str, const Expr & e); - #define COMMON_METHODS \ - void show(std::ostream & str) const; \ - void eval(EvalState & state, Env & env, Value & v); \ - void bindVars(const StaticEnv & env); + void show(const SymbolTable & symbols, std::ostream & str) const override; \ + void eval(EvalState & state, Env & env, Value & v) override; \ + void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) override; struct ExprInt : Expr { NixInt n; Value v; ExprInt(NixInt n) : n(n) { v.mkInt(n); }; + Value * maybeThunk(EvalState & state, Env & env) override; COMMON_METHODS - Value * maybeThunk(EvalState & state, Env & env); }; struct ExprFloat : Expr @@ -107,8 +168,8 @@ struct ExprFloat : Expr NixFloat nf; Value v; ExprFloat(NixFloat nf) : nf(nf) { v.mkFloat(nf); }; + Value * maybeThunk(EvalState & state, Env & env) override; COMMON_METHODS - Value * maybeThunk(EvalState & state, Env & env); }; struct ExprString : Expr @@ -116,8 +177,8 @@ struct ExprString : Expr std::string s; Value v; ExprString(std::string s) : s(std::move(s)) { v.mkString(this->s.data()); }; + Value * maybeThunk(EvalState & state, Env & env) override; COMMON_METHODS - Value * maybeThunk(EvalState & state, Env & env); }; struct ExprPath : Expr @@ -125,8 +186,8 @@ struct ExprPath : Expr std::string s; Value v; ExprPath(std::string s) : s(std::move(s)) { v.mkPath(this->s.c_str()); }; + Value * maybeThunk(EvalState & state, Env & env) override; COMMON_METHODS - Value * maybeThunk(EvalState & state, Env & env); }; typedef uint32_t Level; @@ -134,7 +195,7 @@ typedef uint32_t Displacement; struct ExprVar : Expr { - Pos pos; + PosIdx pos; Symbol name; /* Whether the variable comes from an environment (e.g. 
a rec, let @@ -150,19 +211,21 @@ struct ExprVar : Expr Level level; Displacement displ; - ExprVar(const Symbol & name) : name(name) { }; - ExprVar(const Pos & pos, const Symbol & name) : pos(pos), name(name) { }; + ExprVar(Symbol name) : name(name) { }; + ExprVar(const PosIdx & pos, Symbol name) : pos(pos), name(name) { }; + Value * maybeThunk(EvalState & state, Env & env) override; + PosIdx getPos() const override { return pos; } COMMON_METHODS - Value * maybeThunk(EvalState & state, Env & env); }; struct ExprSelect : Expr { - Pos pos; + PosIdx pos; Expr * e, * def; AttrPath attrPath; - ExprSelect(const Pos & pos, Expr * e, const AttrPath & attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(attrPath) { }; - ExprSelect(const Pos & pos, Expr * e, const Symbol & name) : pos(pos), e(e), def(0) { attrPath.push_back(AttrName(name)); }; + ExprSelect(const PosIdx & pos, Expr * e, const AttrPath & attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(attrPath) { }; + ExprSelect(const PosIdx & pos, Expr * e, Symbol name) : pos(pos), e(e), def(0) { attrPath.push_back(AttrName(name)); }; + PosIdx getPos() const override { return pos; } COMMON_METHODS }; @@ -171,19 +234,20 @@ struct ExprOpHasAttr : Expr Expr * e; AttrPath attrPath; ExprOpHasAttr(Expr * e, const AttrPath & attrPath) : e(e), attrPath(attrPath) { }; + PosIdx getPos() const override { return e->getPos(); } COMMON_METHODS }; struct ExprAttrs : Expr { bool recursive; - Pos pos; + PosIdx pos; struct AttrDef { bool inherited; Expr * e; - Pos pos; + PosIdx pos; Displacement displ; // displacement - AttrDef(Expr * e, const Pos & pos, bool inherited=false) + AttrDef(Expr * e, const PosIdx & pos, bool inherited=false) : inherited(inherited), e(e), pos(pos) { }; AttrDef() { }; }; @@ -191,14 +255,15 @@ struct ExprAttrs : Expr AttrDefs attrs; struct DynamicAttrDef { Expr * nameExpr, * valueExpr; - Pos pos; - DynamicAttrDef(Expr * nameExpr, Expr * valueExpr, const Pos & pos) + PosIdx pos; + DynamicAttrDef(Expr * nameExpr, Expr * valueExpr, const PosIdx & pos) : nameExpr(nameExpr), valueExpr(valueExpr), pos(pos) { }; }; typedef std::vector<DynamicAttrDef> DynamicAttrDefs; DynamicAttrDefs dynamicAttrs; - ExprAttrs(const Pos &pos) : recursive(false), pos(pos) { }; - ExprAttrs() : recursive(false), pos(noPos) { }; + ExprAttrs(const PosIdx &pos) : recursive(false), pos(pos) { }; + ExprAttrs() : recursive(false) { }; + PosIdx getPos() const override { return pos; } COMMON_METHODS }; @@ -207,14 +272,18 @@ struct ExprList : Expr std::vector<Expr *> elems; ExprList() { }; COMMON_METHODS + + PosIdx getPos() const override + { + return elems.empty() ? 
noPos : elems.front()->getPos(); + } }; struct Formal { - Pos pos; + PosIdx pos; Symbol name; Expr * def; - Formal(const Pos & pos, const Symbol & name, Expr * def) : pos(pos), name(name), def(def) { }; }; struct Formals @@ -223,18 +292,20 @@ struct Formals Formals_ formals; bool ellipsis; - bool has(Symbol arg) const { + bool has(Symbol arg) const + { auto it = std::lower_bound(formals.begin(), formals.end(), arg, [] (const Formal & f, const Symbol & sym) { return f.name < sym; }); return it != formals.end() && it->name == arg; } - std::vector<Formal> lexicographicOrder() const + std::vector<Formal> lexicographicOrder(const SymbolTable & symbols) const { std::vector<Formal> result(formals.begin(), formals.end()); std::sort(result.begin(), result.end(), - [] (const Formal & a, const Formal & b) { - return std::string_view(a.name) < std::string_view(b.name); + [&] (const Formal & a, const Formal & b) { + std::string_view sa = symbols[a.name], sb = symbols[b.name]; + return sa < sb; }); return result; } @@ -242,18 +313,23 @@ struct Formals struct ExprLambda : Expr { - Pos pos; + PosIdx pos; Symbol name; Symbol arg; Formals * formals; Expr * body; - ExprLambda(const Pos & pos, const Symbol & arg, Formals * formals, Expr * body) + ExprLambda(PosIdx pos, Symbol arg, Formals * formals, Expr * body) : pos(pos), arg(arg), formals(formals), body(body) { }; - void setName(Symbol & name); - std::string showNamePos() const; + ExprLambda(PosIdx pos, Formals * formals, Expr * body) + : pos(pos), formals(formals), body(body) + { + } + void setName(Symbol name) override; + std::string showNamePos(const EvalState & state) const; inline bool hasFormals() const { return formals != nullptr; } + PosIdx getPos() const override { return pos; } COMMON_METHODS }; @@ -261,10 +337,11 @@ struct ExprCall : Expr { Expr * fun; std::vector<Expr *> args; - Pos pos; - ExprCall(const Pos & pos, Expr * fun, std::vector<Expr *> && args) + PosIdx pos; + ExprCall(const PosIdx & pos, Expr * fun, std::vector<Expr *> && args) : fun(fun), args(args), pos(pos) { } + PosIdx getPos() const override { return pos; } COMMON_METHODS }; @@ -278,26 +355,29 @@ struct ExprLet : Expr struct ExprWith : Expr { - Pos pos; + PosIdx pos; Expr * attrs, * body; size_t prevWith; - ExprWith(const Pos & pos, Expr * attrs, Expr * body) : pos(pos), attrs(attrs), body(body) { }; + ExprWith(const PosIdx & pos, Expr * attrs, Expr * body) : pos(pos), attrs(attrs), body(body) { }; + PosIdx getPos() const override { return pos; } COMMON_METHODS }; struct ExprIf : Expr { - Pos pos; + PosIdx pos; Expr * cond, * then, * else_; - ExprIf(const Pos & pos, Expr * cond, Expr * then, Expr * else_) : pos(pos), cond(cond), then(then), else_(else_) { }; + ExprIf(const PosIdx & pos, Expr * cond, Expr * then, Expr * else_) : pos(pos), cond(cond), then(then), else_(else_) { }; + PosIdx getPos() const override { return pos; } COMMON_METHODS }; struct ExprAssert : Expr { - Pos pos; + PosIdx pos; Expr * cond, * body; - ExprAssert(const Pos & pos, Expr * cond, Expr * body) : pos(pos), cond(cond), body(body) { }; + ExprAssert(const PosIdx & pos, Expr * cond, Expr * body) : pos(pos), cond(cond), body(body) { }; + PosIdx getPos() const override { return pos; } COMMON_METHODS }; @@ -311,19 +391,20 @@ struct ExprOpNot : Expr #define MakeBinOp(name, s) \ struct name : Expr \ { \ - Pos pos; \ + PosIdx pos; \ Expr * e1, * e2; \ name(Expr * e1, Expr * e2) : e1(e1), e2(e2) { }; \ - name(const Pos & pos, Expr * e1, Expr * e2) : pos(pos), e1(e1), e2(e2) { }; \ - void show(std::ostream & str) 
const \ + name(const PosIdx & pos, Expr * e1, Expr * e2) : pos(pos), e1(e1), e2(e2) { }; \ + void show(const SymbolTable & symbols, std::ostream & str) const override \ { \ - str << "(" << *e1 << " " s " " << *e2 << ")"; \ + str << "("; e1->show(symbols, str); str << " " s " "; e2->show(symbols, str); str << ")"; \ } \ - void bindVars(const StaticEnv & env) \ + void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) override \ { \ - e1->bindVars(env); e2->bindVars(env); \ + e1->bindVars(es, env); e2->bindVars(es, env); \ } \ - void eval(EvalState & state, Env & env, Value & v); \ + void eval(EvalState & state, Env & env, Value & v) override; \ + PosIdx getPos() const override { return pos; } \ }; MakeBinOp(ExprOpEq, "==") @@ -336,18 +417,20 @@ MakeBinOp(ExprOpConcatLists, "++") struct ExprConcatStrings : Expr { - Pos pos; + PosIdx pos; bool forceString; - std::vector<std::pair<Pos, Expr *> > * es; - ExprConcatStrings(const Pos & pos, bool forceString, std::vector<std::pair<Pos, Expr *> > * es) + std::vector<std::pair<PosIdx, Expr *>> * es; + ExprConcatStrings(const PosIdx & pos, bool forceString, std::vector<std::pair<PosIdx, Expr *>> * es) : pos(pos), forceString(forceString), es(es) { }; + PosIdx getPos() const override { return pos; } COMMON_METHODS }; struct ExprPos : Expr { - Pos pos; - ExprPos(const Pos & pos) : pos(pos) { }; + PosIdx pos; + ExprPos(const PosIdx & pos) : pos(pos) { }; + PosIdx getPos() const override { return pos; } COMMON_METHODS }; @@ -385,7 +468,7 @@ struct StaticEnv vars.erase(it, end); } - Vars::const_iterator find(const Symbol & name) const + Vars::const_iterator find(Symbol name) const { Vars::value_type key(name, 0); auto i = std::lower_bound(vars.begin(), vars.end(), key); diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 919b9cfae..7c9b5a2db 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -32,12 +32,12 @@ namespace nix { SymbolTable & symbols; Expr * result; Path basePath; - Symbol file; - FileOrigin origin; + PosTable::Origin origin; std::optional<ErrorInfo> error; - ParseData(EvalState & state) + ParseData(EvalState & state, PosTable::Origin origin) : state(state) , symbols(state.symbols) + , origin(std::move(origin)) { }; }; @@ -77,26 +77,26 @@ using namespace nix; namespace nix { -static void dupAttr(const AttrPath & attrPath, const Pos & pos, const Pos & prevPos) +static void dupAttr(const EvalState & state, const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos) { throw ParseError({ .msg = hintfmt("attribute '%1%' already defined at %2%", - showAttrPath(attrPath), prevPos), - .errPos = pos + showAttrPath(state.symbols, attrPath), state.positions[prevPos]), + .errPos = state.positions[pos] }); } -static void dupAttr(Symbol attr, const Pos & pos, const Pos & prevPos) +static void dupAttr(const EvalState & state, Symbol attr, const PosIdx pos, const PosIdx prevPos) { throw ParseError({ - .msg = hintfmt("attribute '%1%' already defined at %2%", attr, prevPos), - .errPos = pos + .msg = hintfmt("attribute '%1%' already defined at %2%", state.symbols[attr], state.positions[prevPos]), + .errPos = state.positions[pos] }); } static void addAttr(ExprAttrs * attrs, AttrPath & attrPath, - Expr * e, const Pos & pos) + Expr * e, const PosIdx pos, const nix::EvalState & state) { AttrPath::iterator i; // All attrpaths have at least one attr @@ -104,15 +104,15 @@ static void addAttr(ExprAttrs * attrs, AttrPath & attrPath, // Checking attrPath validity. 
// =========================== for (i = attrPath.begin(); i + 1 < attrPath.end(); i++) { - if (i->symbol.set()) { + if (i->symbol) { ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol); if (j != attrs->attrs.end()) { if (!j->second.inherited) { ExprAttrs * attrs2 = dynamic_cast<ExprAttrs *>(j->second.e); - if (!attrs2) dupAttr(attrPath, pos, j->second.pos); + if (!attrs2) dupAttr(state, attrPath, pos, j->second.pos); attrs = attrs2; } else - dupAttr(attrPath, pos, j->second.pos); + dupAttr(state, attrPath, pos, j->second.pos); } else { ExprAttrs * nested = new ExprAttrs; attrs->attrs[i->symbol] = ExprAttrs::AttrDef(nested, pos); @@ -126,7 +126,7 @@ static void addAttr(ExprAttrs * attrs, AttrPath & attrPath, } // Expr insertion. // ========================== - if (i->symbol.set()) { + if (i->symbol) { ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol); if (j != attrs->attrs.end()) { // This attr path is already defined. However, if both @@ -139,11 +139,11 @@ static void addAttr(ExprAttrs * attrs, AttrPath & attrPath, for (auto & ad : ae->attrs) { auto j2 = jAttrs->attrs.find(ad.first); if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error. - dupAttr(ad.first, j2->second.pos, ad.second.pos); + dupAttr(state, ad.first, j2->second.pos, ad.second.pos); jAttrs->attrs.emplace(ad.first, ad.second); } } else { - dupAttr(attrPath, pos, j->second.pos); + dupAttr(state, attrPath, pos, j->second.pos); } } else { // This attr path is not defined. Let's create it. @@ -157,14 +157,14 @@ static void addAttr(ExprAttrs * attrs, AttrPath & attrPath, static Formals * toFormals(ParseData & data, ParserFormals * formals, - Pos pos = noPos, Symbol arg = {}) + PosIdx pos = noPos, Symbol arg = {}) { std::sort(formals->formals.begin(), formals->formals.end(), [] (const auto & a, const auto & b) { return std::tie(a.name, a.pos) < std::tie(b.name, b.pos); }); - std::optional<std::pair<Symbol, Pos>> duplicate; + std::optional<std::pair<Symbol, PosIdx>> duplicate; for (size_t i = 0; i + 1 < formals->formals.size(); i++) { if (formals->formals[i].name != formals->formals[i + 1].name) continue; @@ -173,18 +173,18 @@ static Formals * toFormals(ParseData & data, ParserFormals * formals, } if (duplicate) throw ParseError({ - .msg = hintfmt("duplicate formal function argument '%1%'", duplicate->first), - .errPos = duplicate->second + .msg = hintfmt("duplicate formal function argument '%1%'", data.symbols[duplicate->first]), + .errPos = data.state.positions[duplicate->second] }); Formals result; result.ellipsis = formals->ellipsis; result.formals = std::move(formals->formals); - if (arg.set() && result.has(arg)) + if (arg && result.has(arg)) throw ParseError({ - .msg = hintfmt("duplicate formal function argument '%1%'", arg), - .errPos = pos + .msg = hintfmt("duplicate formal function argument '%1%'", data.symbols[arg]), + .errPos = data.state.positions[pos] }); delete formals; @@ -192,8 +192,8 @@ static Formals * toFormals(ParseData & data, ParserFormals * formals, } -static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, - std::vector<std::pair<Pos, std::variant<Expr *, StringToken> > > & es) +static Expr * stripIndentation(const PosIdx pos, SymbolTable & symbols, + std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> & es) { if (es.empty()) return new ExprString(""); @@ -233,7 +233,7 @@ static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, } /* Strip spaces from each line. 
*/ - std::vector<std::pair<Pos, Expr *> > * es2 = new std::vector<std::pair<Pos, Expr *> >; + auto * es2 = new std::vector<std::pair<PosIdx, Expr *>>; atStartOfLine = true; size_t curDropped = 0; size_t n = es.size(); @@ -284,9 +284,9 @@ static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, } -static inline Pos makeCurPos(const YYLTYPE & loc, ParseData * data) +static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data) { - return Pos(data->origin, data->file, loc.first_line, loc.first_column); + return data->state.positions.add(data->origin, loc.first_line, loc.first_column); } #define CUR_POS makeCurPos(*yylocp, data) @@ -299,7 +299,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err { data->error = { .msg = hintfmt(error), - .errPos = makeCurPos(*loc, data) + .errPos = data->state.positions[makeCurPos(*loc, data)] }; } @@ -320,8 +320,8 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err StringToken uri; StringToken str; std::vector<nix::AttrName> * attrNames; - std::vector<std::pair<nix::Pos, nix::Expr *> > * string_parts; - std::vector<std::pair<nix::Pos, std::variant<nix::Expr *, StringToken> > > * ind_string_parts; + std::vector<std::pair<nix::PosIdx, nix::Expr *>> * string_parts; + std::vector<std::pair<nix::PosIdx, std::variant<nix::Expr *, StringToken>>> * ind_string_parts; } %type <e> start expr expr_function expr_if expr_op @@ -369,15 +369,15 @@ expr_function : ID ':' expr_function { $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); } | '{' formals '}' ':' expr_function - { $$ = new ExprLambda(CUR_POS, data->symbols.create(""), toFormals(*data, $2), $5); } + { $$ = new ExprLambda(CUR_POS, toFormals(*data, $2), $5); } | '{' formals '}' '@' ID ':' expr_function { - Symbol arg = data->symbols.create($5); + auto arg = data->symbols.create($5); $$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $2, CUR_POS, arg), $7); } | ID '@' '{' formals '}' ':' expr_function { - Symbol arg = data->symbols.create($1); + auto arg = data->symbols.create($1); $$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $4, CUR_POS, arg), $7); } | ASSERT expr ';' expr_function @@ -388,7 +388,7 @@ expr_function { if (!$2->dynamicAttrs.empty()) throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in let"), - .errPos = CUR_POS + .errPos = data->state.positions[CUR_POS] }); $$ = new ExprLet($2, $4); } @@ -415,7 +415,7 @@ expr_op | expr_op UPDATE expr_op { $$ = new ExprOpUpdate(CUR_POS, $1, $3); } | expr_op '?' 
attrpath { $$ = new ExprOpHasAttr($1, *$3); } | expr_op '+' expr_op - { $$ = new ExprConcatStrings(CUR_POS, false, new std::vector<std::pair<Pos, Expr *> >({{makeCurPos(@1, data), $1}, {makeCurPos(@3, data), $3}})); } + { $$ = new ExprConcatStrings(CUR_POS, false, new std::vector<std::pair<PosIdx, Expr *>>({{makeCurPos(@1, data), $1}, {makeCurPos(@3, data), $3}})); } | expr_op '-' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {$1, $3}); } | expr_op '*' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__mul")), {$1, $3}); } | expr_op '/' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__div")), {$1, $3}); } @@ -477,7 +477,7 @@ expr_simple if (noURLLiterals) throw ParseError({ .msg = hintfmt("URL literals are disabled"), - .errPos = CUR_POS + .errPos = data->state.positions[CUR_POS] }); $$ = new ExprString(std::string($1)); } @@ -503,9 +503,9 @@ string_parts_interpolated : string_parts_interpolated STR { $$ = $1; $1->emplace_back(makeCurPos(@2, data), new ExprString(std::string($2))); } | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); } - | DOLLAR_CURLY expr '}' { $$ = new std::vector<std::pair<Pos, Expr *> >; $$->emplace_back(makeCurPos(@1, data), $2); } + | DOLLAR_CURLY expr '}' { $$ = new std::vector<std::pair<PosIdx, Expr *>>; $$->emplace_back(makeCurPos(@1, data), $2); } | STR DOLLAR_CURLY expr '}' { - $$ = new std::vector<std::pair<Pos, Expr *> >; + $$ = new std::vector<std::pair<PosIdx, Expr *>>; $$->emplace_back(makeCurPos(@1, data), new ExprString(std::string($1))); $$->emplace_back(makeCurPos(@2, data), $3); } @@ -520,6 +520,12 @@ path_start $$ = new ExprPath(path); } | HPATH { + if (evalSettings.pureEval) { + throw Error( + "the path '%s' can not be resolved in pure mode", + std::string_view($1.p, $1.l) + ); + } Path path(getHome() + std::string($1.p + 1, $1.l - 1)); $$ = new ExprPath(path); } @@ -528,17 +534,17 @@ path_start ind_string_parts : ind_string_parts IND_STR { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $2); } | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); } - | { $$ = new std::vector<std::pair<Pos, std::variant<Expr *, StringToken> > >; } + | { $$ = new std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>>; } ; binds - : binds attrpath '=' expr ';' { $$ = $1; addAttr($$, *$2, $4, makeCurPos(@2, data)); } + : binds attrpath '=' expr ';' { $$ = $1; addAttr($$, *$2, $4, makeCurPos(@2, data), data->state); } | binds INHERIT attrs ';' { $$ = $1; for (auto & i : *$3) { if ($$->attrs.find(i.symbol) != $$->attrs.end()) - dupAttr(i.symbol, makeCurPos(@3, data), $$->attrs[i.symbol].pos); - Pos pos = makeCurPos(@3, data); + dupAttr(data->state, i.symbol, makeCurPos(@3, data), $$->attrs[i.symbol].pos); + auto pos = makeCurPos(@3, data); $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true)); } } @@ -547,7 +553,7 @@ binds /* !!! Should ensure sharing of the expression in $4. 
*/ for (auto & i : *$6) { if ($$->attrs.find(i.symbol) != $$->attrs.end()) - dupAttr(i.symbol, makeCurPos(@6, data), $$->attrs[i.symbol].pos); + dupAttr(data->state, i.symbol, makeCurPos(@6, data), $$->attrs[i.symbol].pos); $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), makeCurPos(@6, data))); } } @@ -565,7 +571,7 @@ attrs } else throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in inherit"), - .errPos = makeCurPos(@2, data) + .errPos = data->state.positions[makeCurPos(@2, data)] }); } | { $$ = new AttrPath; } @@ -621,8 +627,8 @@ formals ; formal - : ID { $$ = new Formal(CUR_POS, data->symbols.create($1), 0); } - | ID '?' expr { $$ = new Formal(CUR_POS, data->symbols.create($1), $3); } + : ID { $$ = new Formal{CUR_POS, data->symbols.create($1), 0}; } + | ID '?' expr { $$ = new Formal{CUR_POS, data->symbols.create($1), $3}; } ; %% @@ -643,22 +649,22 @@ namespace nix { Expr * EvalState::parse(char * text, size_t length, FileOrigin origin, - const PathView path, const PathView basePath, StaticEnv & staticEnv) + const PathView path, const PathView basePath, std::shared_ptr<StaticEnv> & staticEnv) { yyscan_t scanner; - ParseData data(*this); - data.origin = origin; + std::string file; switch (origin) { case foFile: - data.file = data.symbols.create(path); + file = path; break; case foStdin: case foString: - data.file = data.symbols.create(text); + file = text; break; default: assert(false); } + ParseData data(*this, {file, origin}); data.basePath = basePath; yylex_init(&scanner); @@ -668,7 +674,7 @@ Expr * EvalState::parse(char * text, size_t length, FileOrigin origin, if (res) throw ParseError(data.error.value()); - data.result->bindVars(staticEnv); + data.result->bindVars(*this, staticEnv); return data.result; } @@ -706,7 +712,7 @@ Expr * EvalState::parseExprFromFile(const Path & path) } -Expr * EvalState::parseExprFromFile(const Path & path, StaticEnv & staticEnv) +Expr * EvalState::parseExprFromFile(const Path & path, std::shared_ptr<StaticEnv> & staticEnv) { auto buffer = readFile(path); // readFile should have left some extra space for terminators @@ -715,7 +721,7 @@ Expr * EvalState::parseExprFromFile(const Path & path, StaticEnv & staticEnv) } -Expr * EvalState::parseExprFromString(std::string s, const Path & basePath, StaticEnv & staticEnv) +Expr * EvalState::parseExprFromString(std::string s, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv) { s.append("\0\0", 2); return parse(s.data(), s.size(), foString, "", basePath, staticEnv); @@ -760,7 +766,7 @@ Path EvalState::findFile(const std::string_view path) } -Path EvalState::findFile(SearchPath & searchPath, const std::string_view path, const Pos & pos) +Path EvalState::findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos) { for (auto & i : searchPath) { std::string suffix; @@ -782,13 +788,13 @@ Path EvalState::findFile(SearchPath & searchPath, const std::string_view path, c if (hasPrefix(path, "nix/")) return concatStrings(corepkgsPrefix, path.substr(4)); - throw ThrownError({ + debugThrowLastTrace(ThrownError({ .msg = hintfmt(evalSettings.pureEval ? 
"cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)" : "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)", path), - .errPos = pos - }); + .errPos = positions[pos] + })); } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index aa22c3b61..840bfecef 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -43,10 +43,10 @@ StringMap EvalState::realiseContext(const PathSet & context) StringMap res; for (auto & i : context) { - auto [ctxS, outputName] = decodeContext(i); - auto ctx = store->parseStorePath(ctxS); + auto [ctx, outputName] = decodeContext(*store, i); + auto ctxS = store->printStorePath(ctx); if (!store->isValidPath(ctx)) - throw InvalidPathError(store->printStorePath(ctx)); + debugThrowLastTrace(InvalidPathError(store->printStorePath(ctx))); if (!outputName.empty() && ctx.isDerivation()) { drvs.push_back({ctx, {outputName}}); } else { @@ -57,9 +57,9 @@ StringMap EvalState::realiseContext(const PathSet & context) if (drvs.empty()) return {}; if (!evalSettings.enableImportFromDerivation) - throw Error( + debugThrowLastTrace(Error( "cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is disabled", - store->printStorePath(drvs.begin()->drvPath)); + store->printStorePath(drvs.begin()->drvPath))); /* Build/substitute the context. */ std::vector<DerivedPath> buildReqs; @@ -68,14 +68,15 @@ StringMap EvalState::realiseContext(const PathSet & context) /* Get all the output paths corresponding to the placeholders we had */ for (auto & [drvPath, outputs] : drvs) { - auto outputPaths = store->queryDerivationOutputMap(drvPath); + const auto outputPaths = store->queryDerivationOutputMap(drvPath); for (auto & outputName : outputs) { - if (outputPaths.count(outputName) == 0) - throw Error("derivation '%s' does not have an output named '%s'", - store->printStorePath(drvPath), outputName); + auto outputPath = get(outputPaths, outputName); + if (!outputPath) + debugThrowLastTrace(Error("derivation '%s' does not have an output named '%s'", + store->printStorePath(drvPath), outputName)); res.insert_or_assign( downstreamPlaceholder(*store, drvPath, outputName), - store->printStorePath(outputPaths.at(outputName)) + store->printStorePath(*outputPath) ); } } @@ -96,7 +97,7 @@ struct RealisePathFlags { bool checkForPureEval = true; }; -static Path realisePath(EvalState & state, const Pos & pos, Value & v, const RealisePathFlags flags = {}) +static Path realisePath(EvalState & state, const PosIdx pos, Value & v, const RealisePathFlags flags = {}) { PathSet context; @@ -105,7 +106,7 @@ static Path realisePath(EvalState & state, const Pos & pos, Value & v, const Rea try { return state.coerceToPath(pos, v, context); } catch (Error & e) { - e.addTrace(pos, "while realising the context of a path"); + e.addTrace(state.positions[pos], "while realising the context of a path"); throw; } }(); @@ -119,7 +120,7 @@ static Path realisePath(EvalState & state, const Pos & pos, Value & v, const Rea ? state.checkSourcePath(realPath) : realPath; } catch (Error & e) { - e.addTrace(pos, "while realising the context of path '%s'", path); + e.addTrace(state.positions[pos], "while realising the context of path '%s'", path); throw; } } @@ -157,7 +158,7 @@ static void mkOutputString( /* Load and evaluate an expression from path specified by the argument. 
*/ -static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vScope, Value & v) +static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * vScope, Value & v) { auto path = realisePath(state, pos, vPath); @@ -215,11 +216,11 @@ static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vS Env * env = &state.allocEnv(vScope->attrs->size()); env->up = &state.baseEnv; - StaticEnv staticEnv(false, &state.staticBaseEnv, vScope->attrs->size()); + auto staticEnv = std::make_shared<StaticEnv>(false, state.staticBaseEnv.get(), vScope->attrs->size()); unsigned int displ = 0; for (auto & attr : *vScope->attrs) { - staticEnv.vars.emplace_back(attr.name, displ); + staticEnv->vars.emplace_back(attr.name, displ); env->values[displ++] = attr.value; } @@ -237,7 +238,7 @@ static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vS static RegisterPrimOp primop_scopedImport(RegisterPrimOp::Info { .name = "scopedImport", .arity = 2, - .fun = [](EvalState & state, const Pos & pos, Value * * args, Value & v) + .fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v) { import(state, pos, *args[1], args[0], v); } @@ -299,7 +300,7 @@ static RegisterPrimOp primop_import({ (The function argument doesn’t have to be called `x` in `foo.nix`; any name would work.) )", - .fun = [](EvalState & state, const Pos & pos, Value * * args, Value & v) + .fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v) { import(state, pos, *args[0], nullptr, v); } @@ -310,7 +311,7 @@ static RegisterPrimOp primop_import({ extern "C" typedef void (*ValueInitializer)(EvalState & state, Value & v); /* Load a ValueInitializer from a DSO and return whatever it initializes */ -void prim_importNative(EvalState & state, const Pos & pos, Value * * args, Value & v) +void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto path = realisePath(state, pos, *args[0]); @@ -318,17 +319,16 @@ void prim_importNative(EvalState & state, const Pos & pos, Value * * args, Value void *handle = dlopen(path.c_str(), RTLD_LAZY | RTLD_LOCAL); if (!handle) - throw EvalError("could not open '%1%': %2%", path, dlerror()); + state.debugThrowLastTrace(EvalError("could not open '%1%': %2%", path, dlerror())); dlerror(); ValueInitializer func = (ValueInitializer) dlsym(handle, sym.c_str()); if(!func) { char *message = dlerror(); if (message) - throw EvalError("could not load symbol '%1%' from '%2%': %3%", sym, path, message); + state.debugThrowLastTrace(EvalError("could not load symbol '%1%' from '%2%': %3%", sym, path, message)); else - throw EvalError("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", - sym, path); + state.debugThrowLastTrace(EvalError("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", sym, path)); } (func)(state, v); @@ -338,17 +338,16 @@ void prim_importNative(EvalState & state, const Pos & pos, Value * * args, Value /* Execute a program and parse its output */ -void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v) +void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceList(*args[0], pos); auto elems = args[0]->listElems(); auto count = args[0]->listSize(); - if (count == 0) { - throw EvalError({ + if (count == 0) + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("at least one argument to 'exec' required"), - .errPos = pos - }); - } + .errPos = state.positions[pos] + })); PathSet 
context; auto program = state.coerceToString(pos, *elems[0], context, false, false).toOwned(); Strings commandArgs; @@ -358,32 +357,33 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v) try { auto _ = state.realiseContext(context); // FIXME: Handle CA derivations } catch (InvalidPathError & e) { - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("cannot execute '%1%', since path '%2%' is not valid", program, e.path), - .errPos = pos - }); + .errPos = state.positions[pos] + })); } auto output = runProgram(program, true, commandArgs); Expr * parsed; try { - parsed = state.parseExprFromString(std::move(output), pos.file); + auto base = state.positions[pos]; + parsed = state.parseExprFromString(std::move(output), base.file); } catch (Error & e) { - e.addTrace(pos, "While parsing the output from '%1%'", program); + e.addTrace(state.positions[pos], "While parsing the output from '%1%'", program); throw; } try { state.eval(parsed, v); } catch (Error & e) { - e.addTrace(pos, "While evaluating the output from '%1%'", program); + e.addTrace(state.positions[pos], "While evaluating the output from '%1%'", program); throw; } } /* Return a string representing the type of the expression. */ -static void prim_typeOf(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_typeOf(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); std::string t; @@ -402,7 +402,7 @@ static void prim_typeOf(EvalState & state, const Pos & pos, Value * * args, Valu case nFloat: t = "float"; break; case nThunk: abort(); } - v.mkString(state.symbols.create(t)); + v.mkString(t); } static RegisterPrimOp primop_typeOf({ @@ -417,7 +417,7 @@ static RegisterPrimOp primop_typeOf({ }); /* Determine whether the argument is the null value. */ -static void prim_isNull(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_isNull(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nNull); @@ -437,7 +437,7 @@ static RegisterPrimOp primop_isNull({ }); /* Determine whether the argument is a function. */ -static void prim_isFunction(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_isFunction(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nFunction); @@ -453,7 +453,7 @@ static RegisterPrimOp primop_isFunction({ }); /* Determine whether the argument is an integer. */ -static void prim_isInt(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_isInt(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nInt); @@ -469,7 +469,7 @@ static RegisterPrimOp primop_isInt({ }); /* Determine whether the argument is a float. */ -static void prim_isFloat(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_isFloat(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nFloat); @@ -485,7 +485,7 @@ static RegisterPrimOp primop_isFloat({ }); /* Determine whether the argument is a string. 
*/ -static void prim_isString(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_isString(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nString); @@ -501,7 +501,7 @@ static RegisterPrimOp primop_isString({ }); /* Determine whether the argument is a Boolean. */ -static void prim_isBool(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_isBool(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nBool); @@ -517,7 +517,7 @@ static RegisterPrimOp primop_isBool({ }); /* Determine whether the argument is a path. */ -static void prim_isPath(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_isPath(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nPath); @@ -545,7 +545,7 @@ struct CompareValues if (v1->type() == nInt && v2->type() == nFloat) return v1->integer < v2->fpoint; if (v1->type() != v2->type()) - throw EvalError("cannot compare %1% with %2%", showType(*v1), showType(*v2)); + state.debugThrowLastTrace(EvalError("cannot compare %1% with %2%", showType(*v1), showType(*v2))); switch (v1->type()) { case nInt: return v1->integer < v2->integer; @@ -567,14 +567,14 @@ struct CompareValues } } default: - throw EvalError("cannot compare %1% with %2%", showType(*v1), showType(*v2)); + state.debugThrowLastTrace(EvalError("cannot compare %1% with %2%", showType(*v1), showType(*v2))); } } }; #if HAVE_BOEHMGC -typedef std::list<Value *, gc_allocator<Value *> > ValueList; +typedef std::list<Value *, gc_allocator<Value *>> ValueList; #else typedef std::list<Value *> ValueList; #endif @@ -585,39 +585,39 @@ static Bindings::iterator getAttr( std::string_view funcName, Symbol attrSym, Bindings * attrSet, - const Pos & pos) + const PosIdx pos) { Bindings::iterator value = attrSet->find(attrSym); if (value == attrSet->end()) { hintformat errorMsg = hintfmt( "attribute '%s' missing for call to '%s'", - attrSym, + state.symbols[attrSym], funcName ); - Pos aPos = *attrSet->pos; - if (aPos == noPos) { - throw TypeError({ + auto aPos = attrSet->pos; + if (!aPos) { + state.debugThrowLastTrace(TypeError({ .msg = errorMsg, - .errPos = pos, - }); + .errPos = state.positions[pos], + })); } else { auto e = TypeError({ .msg = errorMsg, - .errPos = aPos, + .errPos = state.positions[aPos], }); // Adding another trace for the function name to make it clear // which call received wrong arguments. 
- e.addTrace(pos, hintfmt("while invoking '%s'", funcName)); - throw e; + e.addTrace(state.positions[pos], hintfmt("while invoking '%s'", funcName)); + state.debugThrowLastTrace(e); } } return value; } -static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_genericClosure(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceAttrs(*args[0], pos); @@ -664,10 +664,10 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar Bindings::iterator key = e->attrs->find(state.sKey); if (key == e->attrs->end()) - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("attribute 'key' required"), - .errPos = pos - }); + .errPos = state.positions[pos] + })); state.forceValue(*key->value, pos); if (!doneKeys.insert(key->value).second) continue; @@ -694,21 +694,81 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar static RegisterPrimOp primop_genericClosure(RegisterPrimOp::Info { .name = "__genericClosure", + .args = {"attrset"}, .arity = 1, + .doc = R"( + Take an *attrset* with values named `startSet` and `operator` in order to + return a *list of attrsets* by starting with the `startSet` and recursively + applying the `operator` function to each element. The *attrsets* in the + `startSet` and those produced by the `operator` must each contain a value named + `key`, and these keys must be comparable to each other. The result is produced by + repeatedly calling the operator for each element encountered with a + unique key, terminating when no new elements are produced. For example, + + ``` + builtins.genericClosure { + startSet = [ {key = 5;} ]; + operator = item: [{ + key = if (item.key / 2 ) * 2 == item.key + then item.key / 2 + else 3 * item.key + 1; + }]; + } + ``` + evaluates to + ``` + [ { key = 5; } { key = 16; } { key = 8; } { key = 4; } { key = 2; } { key = 1; } ] + ``` + )", .fun = prim_genericClosure, }); + +static RegisterPrimOp primop_break({ + .name = "break", + .args = {"v"}, + .doc = R"( + In debug mode (enabled using `--debugger`), pause Nix expression evaluation and enter the REPL. + Otherwise, return the argument `v`. + )", + .fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v) + { + if (state.debugRepl && !state.debugTraces.empty()) { + auto error = Error(ErrorInfo { + .level = lvlInfo, + .msg = hintfmt("breakpoint reached"), + .errPos = state.positions[pos], + }); + + auto & dt = state.debugTraces.front(); + state.runDebugRepl(&error, dt.env, dt.expr); + + if (state.debugQuit) { + // If the user elects to quit the repl, throw an exception. + throw Error(ErrorInfo{ + .level = lvlInfo, + .msg = hintfmt("quit the debugger"), + .errPos = state.positions[noPos], + }); + } + } + + // Return the value we were passed. + v = *args[0]; + } +}); + static RegisterPrimOp primop_abort({ .name = "abort", .args = {"s"}, .doc = R"( Abort Nix expression evaluation and print the error message *s*. 
)", - .fun = [](EvalState & state, const Pos & pos, Value * * args, Value & v) + .fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v) { PathSet context; auto s = state.coerceToString(pos, *args[0], context).toOwned(); - throw Abort("evaluation aborted with the following error message: '%1%'", s); + state.debugThrowLastTrace(Abort("evaluation aborted with the following error message: '%1%'", s)); } }); @@ -722,15 +782,15 @@ static RegisterPrimOp primop_throw({ derivations, a derivation that throws an error is silently skipped (which is not the case for `abort`). )", - .fun = [](EvalState & state, const Pos & pos, Value * * args, Value & v) + .fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v) { PathSet context; auto s = state.coerceToString(pos, *args[0], context).toOwned(); - throw ThrownError(s); + state.debugThrowLastTrace(ThrownError(s)); } }); -static void prim_addErrorContext(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * * args, Value & v) { try { state.forceValue(*args[1], pos); @@ -748,7 +808,7 @@ static RegisterPrimOp primop_addErrorContext(RegisterPrimOp::Info { .fun = prim_addErrorContext, }); -static void prim_ceil(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_ceil(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto value = state.forceFloat(*args[0], args[0]->determinePos(pos)); v.mkInt(ceil(value)); @@ -767,7 +827,7 @@ static RegisterPrimOp primop_ceil({ .fun = prim_ceil, }); -static void prim_floor(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_floor(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto value = state.forceFloat(*args[0], args[0]->determinePos(pos)); v.mkInt(floor(value)); @@ -788,9 +848,21 @@ static RegisterPrimOp primop_floor({ /* Try evaluating the argument. Success => {success=true; value=something;}, * else => {success=false; value=false;} */ -static void prim_tryEval(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto attrs = state.buildBindings(2); + + /* increment state.trylevel, and decrement it when this function returns. */ + MaintainCount trylevel(state.trylevel); + + void (* savedDebugRepl)(ref<EvalState> es, const ValMap & extraEnv) = nullptr; + if (state.debugRepl && evalSettings.ignoreExceptionsDuringTry) + { + /* to prevent starting the repl from exceptions withing a tryEval, null it. */ + savedDebugRepl = state.debugRepl; + state.debugRepl = nullptr; + } + try { state.forceValue(*args[0], pos); attrs.insert(state.sValue, args[0]); @@ -799,6 +871,11 @@ static void prim_tryEval(EvalState & state, const Pos & pos, Value * * args, Val attrs.alloc(state.sValue).mkBool(false); attrs.alloc("success").mkBool(false); } + + // restore the debugRepl pointer if we saved it earlier. + if (savedDebugRepl) + state.debugRepl = savedDebugRepl; + v.mkAttrs(attrs); } @@ -824,7 +901,7 @@ static RegisterPrimOp primop_tryEval({ }); /* Return an environment variable. Use with care. */ -static void prim_getEnv(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_getEnv(EvalState & state, const PosIdx pos, Value * * args, Value & v) { std::string name(state.forceStringNoCtx(*args[0], pos)); v.mkString(evalSettings.restrictEval || evalSettings.pureEval ? 
"" : getEnv(name).value_or("")); @@ -848,7 +925,7 @@ static RegisterPrimOp primop_getEnv({ }); /* Evaluate the first argument, then return the second argument. */ -static void prim_seq(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_seq(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); state.forceValue(*args[1], pos); @@ -867,7 +944,7 @@ static RegisterPrimOp primop_seq({ /* Evaluate the first argument deeply (i.e. recursing into lists and attrsets), then return the second argument. */ -static void prim_deepSeq(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_deepSeq(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValueDeep(*args[0]); state.forceValue(*args[1], pos); @@ -887,13 +964,13 @@ static RegisterPrimOp primop_deepSeq({ /* Evaluate the first expression and print it on standard error. Then return the second expression. Useful for debugging. */ -static void prim_trace(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_trace(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); if (args[0]->type() == nString) printError("trace: %1%", args[0]->string.s); else - printError("trace: %1%", *args[0]); + printError("trace: %1%", printValue(state, *args[0])); state.forceValue(*args[1], pos); v = *args[1]; } @@ -910,6 +987,15 @@ static RegisterPrimOp primop_trace({ }); +/* Takes two arguments and evaluates to the second one. Used as the + * builtins.traceVerbose implementation when --trace-verbose is not enabled + */ +static void prim_second(EvalState & state, const PosIdx pos, Value * * args, Value & v) +{ + state.forceValue(*args[1], pos); + v = *args[1]; +} + /************************************************************* * Derivations *************************************************************/ @@ -922,7 +1008,7 @@ static RegisterPrimOp primop_trace({ derivation; `drvPath' containing the path of the Nix expression; and `type' set to `derivation' to indicate that this is a derivation. 
*/ -static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceAttrs(*args[0], pos); @@ -936,11 +1022,11 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * ); std::string drvName; - Pos & posDrvName(*attr->pos); + const auto posDrvName = attr->pos; try { drvName = state.forceStringNoCtx(*attr->value, pos); } catch (Error & e) { - e.addTrace(posDrvName, "while evaluating the derivation attribute 'name'"); + e.addTrace(state.positions[posDrvName], "while evaluating the derivation attribute 'name'"); throw; } @@ -964,53 +1050,54 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * PathSet context; bool contentAddressed = false; + bool isImpure = false; std::optional<std::string> outputHash; std::string outputHashAlgo; - auto ingestionMethod = FileIngestionMethod::Flat; + std::optional<FileIngestionMethod> ingestionMethod; StringSet outputs; outputs.insert("out"); - for (auto & i : args[0]->attrs->lexicographicOrder()) { + for (auto & i : args[0]->attrs->lexicographicOrder(state.symbols)) { if (i->name == state.sIgnoreNulls) continue; - const std::string & key = i->name; + const std::string & key = state.symbols[i->name]; vomit("processing attribute '%1%'", key); auto handleHashMode = [&](const std::string_view s) { if (s == "recursive") ingestionMethod = FileIngestionMethod::Recursive; else if (s == "flat") ingestionMethod = FileIngestionMethod::Flat; else - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s), - .errPos = posDrvName - }); + .errPos = state.positions[posDrvName] + })); }; auto handleOutputs = [&](const Strings & ss) { outputs.clear(); for (auto & j : ss) { if (outputs.find(j) != outputs.end()) - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("duplicate derivation output '%1%'", j), - .errPos = posDrvName - }); + .errPos = state.positions[posDrvName] + })); /* !!! Check whether j is a valid attribute name. */ /* Derivations cannot be named ‘drv’, because then we'd have an attribute ‘drvPath’ in the resulting set. */ if (j == "drv") - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("invalid derivation output name 'drv'" ), - .errPos = posDrvName - }); + .errPos = state.positions[posDrvName] + })); outputs.insert(j); } if (outputs.empty()) - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("derivation cannot have an empty set of outputs"), - .errPos = posDrvName - }); + .errPos = state.positions[posDrvName] + })); }; try { @@ -1026,6 +1113,12 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * settings.requireExperimentalFeature(Xp::CaDerivations); } + else if (i->name == state.sImpure) { + isImpure = state.forceBool(*i->value, pos); + if (isImpure) + settings.requireExperimentalFeature(Xp::ImpureDerivations); + } + /* The `args' attribute is special: it supplies the command-line arguments to the builder. 
*/ else if (i->name == state.sArgs) { @@ -1067,7 +1160,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * } } else { - auto s = state.coerceToString(*i->pos, *i->value, context, true).toOwned(); + auto s = state.coerceToString(i->pos, *i->value, context, true).toOwned(); drv.env.emplace(key, s); if (i->name == state.sBuilder) drv.builder = std::move(s); else if (i->name == state.sSystem) drv.platform = std::move(s); @@ -1081,7 +1174,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * } } catch (Error & e) { - e.addTrace(posDrvName, + e.addTrace(state.positions[posDrvName], "while evaluating the attribute '%1%' of the derivation '%2%'", key, drvName); throw; @@ -1118,8 +1211,8 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * /* Handle derivation outputs of the form ‘!<name>!<path>’. */ else if (path.at(0) == '!') { - auto ctx = decodeContext(path); - drv.inputDrvs[state.store->parseStorePath(ctx.first)].insert(ctx.second); + auto ctx = decodeContext(*state.store, path); + drv.inputDrvs[ctx.first].insert(ctx.second); } /* Otherwise it's a source file. */ @@ -1129,23 +1222,23 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * /* Do we have all required attributes? */ if (drv.builder == "") - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("required attribute 'builder' missing"), - .errPos = posDrvName - }); + .errPos = state.positions[posDrvName] + })); if (drv.platform == "") - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("required attribute 'system' missing"), - .errPos = posDrvName - }); + .errPos = state.positions[posDrvName] + })); /* Check whether the derivation name is valid. */ if (isDerivation(drvName)) - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("derivation names are not allowed to end in '%s'", drvExtension), - .errPos = posDrvName - }); + .errPos = state.positions[posDrvName] + })); if (outputHash) { /* Handle fixed-output derivations. @@ -1153,36 +1246,49 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * Ignore `__contentAddressed` because fixed output derivations are already content addressed. 
*/ if (outputs.size() != 1 || *(outputs.begin()) != "out") - throw Error({ + state.debugThrowLastTrace(Error({ .msg = hintfmt("multiple outputs are not supported in fixed-output derivations"), - .errPos = posDrvName - }); + .errPos = state.positions[posDrvName] + })); - std::optional<HashType> ht = parseHashTypeOpt(outputHashAlgo); - Hash h = newHashAllowEmpty(*outputHash, ht); + auto h = newHashAllowEmpty(*outputHash, parseHashTypeOpt(outputHashAlgo)); - auto outPath = state.store->makeFixedOutputPath(ingestionMethod, h, drvName); + auto method = ingestionMethod.value_or(FileIngestionMethod::Flat); + auto outPath = state.store->makeFixedOutputPath(method, h, drvName); drv.env["out"] = state.store->printStorePath(outPath); - drv.outputs.insert_or_assign("out", DerivationOutput { - .output = DerivationOutputCAFixed { - .hash = FixedOutputHash { - .method = ingestionMethod, - .hash = std::move(h), - }, + drv.outputs.insert_or_assign("out", + DerivationOutput::CAFixed { + .hash = FixedOutputHash { + .method = method, + .hash = std::move(h), }, - }); + }); } - else if (contentAddressed) { - HashType ht = parseHashType(outputHashAlgo); + else if (contentAddressed || isImpure) { + if (contentAddressed && isImpure) + throw EvalError({ + .msg = hintfmt("derivation cannot be both content-addressed and impure"), + .errPos = state.positions[posDrvName] + }); + + auto ht = parseHashTypeOpt(outputHashAlgo).value_or(htSHA256); + auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive); + for (auto & i : outputs) { drv.env[i] = hashPlaceholder(i); - drv.outputs.insert_or_assign(i, DerivationOutput { - .output = DerivationOutputCAFloating { - .method = ingestionMethod, - .hashType = ht, - }, - }); + if (isImpure) + drv.outputs.insert_or_assign(i, + DerivationOutput::Impure { + .method = method, + .hashType = ht, + }); + else + drv.outputs.insert_or_assign(i, + DerivationOutput::CAFloating { + .method = method, + .hashType = ht, + }); } } @@ -1196,44 +1302,34 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * for (auto & i : outputs) { drv.env[i] = ""; drv.outputs.insert_or_assign(i, - DerivationOutput { - .output = DerivationOutputInputAddressed { - .path = StorePath::dummy, - }, - }); + DerivationOutput::Deferred { }); } - // Regular, non-CA derivation should always return a single hash and not - // hash per output. auto hashModulo = hashDerivationModulo(*state.store, Derivation(drv), true); - std::visit(overloaded { - [&](Hash & h) { - for (auto & i : outputs) { - auto outPath = state.store->makeOutputPath(i, h, drvName); - drv.env[i] = state.store->printStorePath(outPath); - drv.outputs.insert_or_assign(i, - DerivationOutput { - .output = DerivationOutputInputAddressed { - .path = std::move(outPath), - }, - }); - } - }, - [&](CaOutputHashes &) { - // Shouldn't happen as the toplevel derivation is not CA. 
- assert(false); - }, - [&](DeferredHash &) { - for (auto & i : outputs) { - drv.outputs.insert_or_assign(i, - DerivationOutput { - .output = DerivationOutputDeferred{}, - }); - } - }, - }, - hashModulo); - + switch (hashModulo.kind) { + case DrvHash::Kind::Regular: + for (auto & i : outputs) { + auto h = get(hashModulo.hashes, i); + if (!h) + throw AssertionError({ + .msg = hintfmt("derivation produced no hash for output '%s'", i), + .errPos = state.positions[posDrvName], + }); + auto outPath = state.store->makeOutputPath(i, *h, drvName); + drv.env[i] = state.store->printStorePath(outPath); + drv.outputs.insert_or_assign( + i, + DerivationOutputInputAddressed { + .path = std::move(outPath), + }); + } + break; + ; + case DrvHash::Kind::Deferred: + for (auto & i : outputs) { + drv.outputs.insert_or_assign(i, DerivationOutputDeferred {}); + } + } } /* Write the resulting term into the Nix store directory. */ @@ -1244,12 +1340,9 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * /* Optimisation, but required in read-only mode! because in that case we don't actually write store derivations, so we can't - read them later. - - However, we don't bother doing this for floating CA derivations because - their "hash modulo" is indeterminate until built. */ - if (drv.type() != DerivationType::CAFloating) { - auto h = hashDerivationModulo(*state.store, Derivation(drv), false); + read them later. */ + { + auto h = hashDerivationModulo(*state.store, drv, false); drvHashes.lock()->insert_or_assign(drvPath, h); } @@ -1273,7 +1366,7 @@ static RegisterPrimOp primop_derivationStrict(RegisterPrimOp::Info { time, any occurrence of this string in an derivation attribute will be replaced with the concrete path in the Nix store of the output ‘out’. */ -static void prim_placeholder(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_placeholder(EvalState & state, const PosIdx pos, Value * * args, Value & v) { v.mkString(hashPlaceholder(state.forceStringNoCtx(*args[0], pos))); } @@ -1296,7 +1389,7 @@ static RegisterPrimOp primop_placeholder({ /* Convert the argument to a path. !!! obsolete? */ -static void prim_toPath(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_toPath(EvalState & state, const PosIdx pos, Value * * args, Value & v) { PathSet context; Path path = state.coerceToPath(pos, *args[0], context); @@ -1321,13 +1414,13 @@ static RegisterPrimOp primop_toPath({ /nix/store/newhash-oldhash-oldname. In the past, `toPath' had special case behaviour for store paths, but that created weird corner cases. */ -static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, Value & v) { if (evalSettings.pureEval) - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("'%s' is not allowed in pure evaluation mode", "builtins.storePath"), - .errPos = pos - }); + .errPos = state.positions[pos] + })); PathSet context; Path path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context)); @@ -1336,10 +1429,10 @@ static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, V e.g. nix-push does the right thing. 
*/ if (!state.store->isStorePath(path)) path = canonPath(path, true); if (!state.store->isInStore(path)) - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("path '%1%' is not in the Nix store", path), - .errPos = pos - }); + .errPos = state.positions[pos] + })); auto path2 = state.store->toStorePath(path).first; if (!settings.readOnlyMode) state.store->ensurePath(path2); @@ -1365,7 +1458,7 @@ static RegisterPrimOp primop_storePath({ .fun = prim_storePath, }); -static void prim_pathExists(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args, Value & v) { /* We don’t check the path right now, because we don’t want to throw if the path isn’t allowed, but just return false (and we @@ -1397,7 +1490,7 @@ static RegisterPrimOp primop_pathExists({ /* Return the base name of the given string, i.e., everything following the last slash. */ -static void prim_baseNameOf(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_baseNameOf(EvalState & state, const PosIdx pos, Value * * args, Value & v) { PathSet context; v.mkString(baseNameOf(*state.coerceToString(pos, *args[0], context, false, false)), context); @@ -1417,7 +1510,7 @@ static RegisterPrimOp primop_baseNameOf({ /* Return the directory of the given path, i.e., everything before the last slash. Return either a path or a string depending on the type of the argument. */ -static void prim_dirOf(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_dirOf(EvalState & state, const PosIdx pos, Value * * args, Value & v) { PathSet context; auto path = state.coerceToString(pos, *args[0], context, false, false); @@ -1437,12 +1530,12 @@ static RegisterPrimOp primop_dirOf({ }); /* Return the contents of a file as a string. */ -static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_readFile(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto path = realisePath(state, pos, *args[0]); auto s = readFile(path); if (s.find((char) 0) != std::string::npos) - throw Error("the contents of the file '%1%' cannot be represented as a Nix string", path); + state.debugThrowLastTrace(Error("the contents of the file '%1%' cannot be represented as a Nix string", path)); StorePathSet refs; if (state.store->isInStore(path)) { try { @@ -1465,7 +1558,7 @@ static RegisterPrimOp primop_readFile({ /* Find a file in the Nix search path. Used to implement <x> paths, which are desugared to 'findFile __nixPath "x"'. */ -static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceList(*args[0], pos); @@ -1494,13 +1587,12 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va auto rewrites = state.realiseContext(context); path = rewriteStrings(path, rewrites); } catch (InvalidPathError & e) { - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path), - .errPos = pos - }); + .errPos = state.positions[pos] + })); } - searchPath.emplace_back(prefix, path); } @@ -1516,15 +1608,15 @@ static RegisterPrimOp primop_findFile(RegisterPrimOp::Info { }); /* Return the cryptographic hash of a file in base-16. 
*/ -static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto type = state.forceStringNoCtx(*args[0], pos); std::optional<HashType> ht = parseHashType(type); if (!ht) - throw Error({ + state.debugThrowLastTrace(Error({ .msg = hintfmt("unknown hash type '%1%'", type), - .errPos = pos - }); + .errPos = state.positions[pos] + })); auto path = realisePath(state, pos, *args[1]); @@ -1543,7 +1635,7 @@ static RegisterPrimOp primop_hashFile({ }); /* Read a directory (without . or ..) */ -static void prim_readDir(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto path = realisePath(state, pos, *args[0]); @@ -1592,7 +1684,7 @@ static RegisterPrimOp primop_readDir({ /* Convert the argument (which can be any Nix expression) to an XML representation returned in a string. Not all Nix expressions can be sensibly or completely represented (e.g., functions). */ -static void prim_toXML(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_toXML(EvalState & state, const PosIdx pos, Value * * args, Value & v) { std::ostringstream out; PathSet context; @@ -1700,7 +1792,7 @@ static RegisterPrimOp primop_toXML({ /* Convert the argument (which can be any Nix expression) to a JSON string. Not all Nix expressions can be sensibly or completely represented (e.g., functions). */ -static void prim_toJSON(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_toJSON(EvalState & state, const PosIdx pos, Value * * args, Value & v) { std::ostringstream out; PathSet context; @@ -1723,13 +1815,13 @@ static RegisterPrimOp primop_toJSON({ }); /* Parse a JSON string to a value. */ -static void prim_fromJSON(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_fromJSON(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto s = state.forceStringNoCtx(*args[0], pos); try { parseJSON(state, s, v); } catch (JSONParseError &e) { - e.addTrace(pos, "while decoding a JSON string"); + e.addTrace(state.positions[pos], "while decoding a JSON string"); throw; } } @@ -1751,7 +1843,7 @@ static RegisterPrimOp primop_fromJSON({ /* Store a string in the Nix store as a source file that can be used as an input by derivations. */ -static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Value & v) { PathSet context; std::string name(state.forceStringNoCtx(*args[0], pos)); @@ -1761,25 +1853,26 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu for (auto path : context) { if (path.at(0) != '/') - throw EvalError( { + state.debugThrowLastTrace(EvalError({ .msg = hintfmt( "in 'toFile': the file named '%1%' must not contain a reference " "to a derivation but contains (%2%)", name, path), - .errPos = pos - }); + .errPos = state.positions[pos] + })); refs.insert(state.store->parseStorePath(path)); } - auto storePath = state.store->printStorePath(settings.readOnlyMode + auto storePath = settings.readOnlyMode ? 
state.store->computeStorePathForText(name, contents, refs) - : state.store->addTextToStore(name, contents, refs, state.repair)); + : state.store->addTextToStore(name, contents, refs, state.repair); /* Note: we don't need to add `context' to the context of the result, since `storePath' itself has references to the paths used in args[1]. */ - v.mkString(storePath, {storePath}); + /* Add the output of this to the allowed paths. */ + state.allowAndSetStorePathString(storePath, v); } static RegisterPrimOp primop_toFile({ @@ -1862,7 +1955,7 @@ static RegisterPrimOp primop_toFile({ static void addPath( EvalState & state, - const Pos & pos, + const PosIdx pos, const std::string & name, Path path, Value * filterFun, @@ -1919,40 +2012,35 @@ static void addPath( if (expectedHash) expectedStorePath = state.store->makeFixedOutputPath(method, *expectedHash, name); - Path dstPath; if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) { - dstPath = state.store->printStorePath(settings.readOnlyMode + StorePath dstPath = settings.readOnlyMode ? state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first - : state.store->addToStore(name, path, method, htSHA256, filter, state.repair, refs)); - if (expectedHash && expectedStorePath != state.store->parseStorePath(dstPath)) - throw Error("store path mismatch in (possibly filtered) path added from '%s'", path); + : state.store->addToStore(name, path, method, htSHA256, filter, state.repair, refs); + if (expectedHash && expectedStorePath != dstPath) + state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path)); + state.allowAndSetStorePathString(dstPath, v); } else - dstPath = state.store->printStorePath(*expectedStorePath); - - v.mkString(dstPath, {dstPath}); - - state.allowPath(dstPath); - + state.allowAndSetStorePathString(*expectedStorePath, v); } catch (Error & e) { - e.addTrace(pos, "while adding path '%s'", path); + e.addTrace(state.positions[pos], "while adding path '%s'", path); throw; } } -static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * args, Value & v) { PathSet context; Path path = state.coerceToPath(pos, *args[1], context); state.forceValue(*args[0], pos); if (args[0]->type() != nFunction) - throw TypeError({ + state.debugThrowLastTrace(TypeError({ .msg = hintfmt( "first argument in call to 'filterSource' is not a function but %1%", showType(*args[0])), - .errPos = pos - }); + .errPos = state.positions[pos] + })); addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v, context); } @@ -2012,7 +2100,7 @@ static RegisterPrimOp primop_filterSource({ .fun = prim_filterSource, }); -static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceAttrs(*args[0], pos); Path path; @@ -2023,29 +2111,29 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value PathSet context; for (auto & attr : *args[0]->attrs) { - auto & n(attr.name); + auto n = state.symbols[attr.name]; if (n == "path") - path = state.coerceToPath(*attr.pos, *attr.value, context); + path = state.coerceToPath(attr.pos, *attr.value, context); else if (attr.name == state.sName) - name = state.forceStringNoCtx(*attr.value, *attr.pos); + name = state.forceStringNoCtx(*attr.value, attr.pos); else if 
(n == "filter") { state.forceValue(*attr.value, pos); filterFun = attr.value; } else if (n == "recursive") - method = FileIngestionMethod { state.forceBool(*attr.value, *attr.pos) }; + method = FileIngestionMethod { state.forceBool(*attr.value, attr.pos) }; else if (n == "sha256") - expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256); + expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos), htSHA256); else - throw EvalError({ - .msg = hintfmt("unsupported argument '%1%' to 'addPath'", attr.name), - .errPos = *attr.pos - }); + state.debugThrowLastTrace(EvalError({ + .msg = hintfmt("unsupported argument '%1%' to 'addPath'", state.symbols[attr.name]), + .errPos = state.positions[attr.pos] + })); } if (path.empty()) - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("'path' required"), - .errPos = pos - }); + .errPos = state.positions[pos] + })); if (name.empty()) name = baseNameOf(path); @@ -2095,7 +2183,7 @@ static RegisterPrimOp primop_path({ /* Return the names of the attributes in a set as a sorted list of strings. */ -static void prim_attrNames(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_attrNames(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceAttrs(*args[0], pos); @@ -2103,7 +2191,7 @@ static void prim_attrNames(EvalState & state, const Pos & pos, Value * * args, V size_t n = 0; for (auto & i : *args[0]->attrs) - (v.listElems()[n++] = state.allocValue())->mkString(i.name); + (v.listElems()[n++] = state.allocValue())->mkString(state.symbols[i.name]); std::sort(v.listElems(), v.listElems() + n, [](Value * v1, Value * v2) { return strcmp(v1->string.s, v2->string.s) < 0; }); @@ -2122,7 +2210,7 @@ static RegisterPrimOp primop_attrNames({ /* Return the values of the attributes in a set as a list, in the same order as attrNames. */ -static void prim_attrValues(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_attrValues(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceAttrs(*args[0], pos); @@ -2133,8 +2221,9 @@ static void prim_attrValues(EvalState & state, const Pos & pos, Value * * args, v.listElems()[n++] = (Value *) &i; std::sort(v.listElems(), v.listElems() + n, - [](Value * v1, Value * v2) { - std::string_view s1 = ((Attr *) v1)->name, s2 = ((Attr *) v2)->name; + [&](Value * v1, Value * v2) { + std::string_view s1 = state.symbols[((Attr *) v1)->name], + s2 = state.symbols[((Attr *) v2)->name]; return s1 < s2; }); @@ -2153,7 +2242,7 @@ static RegisterPrimOp primop_attrValues({ }); /* Dynamic version of the `.' operator. */ -void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v) +void prim_getAttr(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto attr = state.forceStringNoCtx(*args[0], pos); state.forceAttrs(*args[1], pos); @@ -2165,7 +2254,7 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v) pos ); // !!! add to stack trace? - if (state.countCalls && *i->pos != noPos) state.attrSelects[*i->pos]++; + if (state.countCalls && i->pos) state.attrSelects[i->pos]++; state.forceValue(*i->value, pos); v = *i->value; } @@ -2183,7 +2272,7 @@ static RegisterPrimOp primop_getAttr({ }); /* Return position information of the specified attribute. 
*/ -static void prim_unsafeGetAttrPos(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_unsafeGetAttrPos(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto attr = state.forceStringNoCtx(*args[0], pos); state.forceAttrs(*args[1], pos); @@ -2201,7 +2290,7 @@ static RegisterPrimOp primop_unsafeGetAttrPos(RegisterPrimOp::Info { }); /* Dynamic version of the `?' operator. */ -static void prim_hasAttr(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_hasAttr(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto attr = state.forceStringNoCtx(*args[0], pos); state.forceAttrs(*args[1], pos); @@ -2220,7 +2309,7 @@ static RegisterPrimOp primop_hasAttr({ }); /* Determine whether the argument is a set. */ -static void prim_isAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_isAttrs(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nAttrs); @@ -2235,7 +2324,7 @@ static RegisterPrimOp primop_isAttrs({ .fun = prim_isAttrs, }); -static void prim_removeAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_removeAttrs(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceAttrs(*args[0], pos); state.forceList(*args[1], pos); @@ -2283,7 +2372,7 @@ static RegisterPrimOp primop_removeAttrs({ "nameN"; value = valueN;}] is transformed to {name1 = value1; ... nameN = valueN;}. In case of duplicate occurrences of the same name, the first takes precedence. */ -static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_listToAttrs(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceList(*args[0], pos); @@ -2302,9 +2391,9 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args, pos ); - auto name = state.forceStringNoCtx(*j->value, *j->pos); + auto name = state.forceStringNoCtx(*j->value, j->pos); - Symbol sym = state.symbols.create(name); + auto sym = state.symbols.create(name); if (seen.insert(sym).second) { Bindings::iterator j2 = getAttr( state, @@ -2345,7 +2434,7 @@ static RegisterPrimOp primop_listToAttrs({ .fun = prim_listToAttrs, }); -static void prim_intersectAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_intersectAttrs(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceAttrs(*args[0], pos); state.forceAttrs(*args[1], pos); @@ -2365,15 +2454,15 @@ static RegisterPrimOp primop_intersectAttrs({ .name = "__intersectAttrs", .args = {"e1", "e2"}, .doc = R"( - Return a set consisting of the attributes in the set *e2* that also - exist in the set *e1*. + Return a set consisting of the attributes in the set *e2* which have the + same name as some attribute in *e1*. 
)", .fun = prim_intersectAttrs, }); -static void prim_catAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_catAttrs(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - Symbol attrName = state.symbols.create(state.forceStringNoCtx(*args[0], pos)); + auto attrName = state.symbols.create(state.forceStringNoCtx(*args[0], pos)); state.forceList(*args[1], pos); Value * res[args[1]->listSize()]; @@ -2408,7 +2497,7 @@ static RegisterPrimOp primop_catAttrs({ .fun = prim_catAttrs, }); -static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_functionArgs(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); if (args[0]->isPrimOpApp() || args[0]->isPrimOp()) { @@ -2416,10 +2505,10 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args return; } if (!args[0]->isLambda()) - throw TypeError({ + state.debugThrowLastTrace(TypeError({ .msg = hintfmt("'functionArgs' requires a function"), - .errPos = pos - }); + .errPos = state.positions[pos] + })); if (!args[0]->lambda.fun->hasFormals()) { v.mkAttrs(&state.emptyBindings); @@ -2429,7 +2518,7 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args auto attrs = state.buildBindings(args[0]->lambda.fun->formals->formals.size()); for (auto & i : args[0]->lambda.fun->formals->formals) // !!! should optimise booleans (allocate only once) - attrs.alloc(i.name, ptr(&i.pos)).mkBool(i.def); + attrs.alloc(i.name, i.pos).mkBool(i.def); v.mkAttrs(attrs); } @@ -2451,7 +2540,7 @@ static RegisterPrimOp primop_functionArgs({ }); /* */ -static void prim_mapAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_mapAttrs(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceAttrs(*args[1], pos); @@ -2460,7 +2549,7 @@ static void prim_mapAttrs(EvalState & state, const Pos & pos, Value * * args, Va for (auto & i : *args[1]->attrs) { Value * vName = state.allocValue(); Value * vFun2 = state.allocValue(); - vName->mkString(i.name); + vName->mkString(state.symbols[i.name]); vFun2->mkApp(args[0], vName); attrs.alloc(i.name).mkApp(vFun2, i.value); } @@ -2483,7 +2572,7 @@ static RegisterPrimOp primop_mapAttrs({ .fun = prim_mapAttrs, }); -static void prim_zipAttrsWith(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_zipAttrsWith(EvalState & state, const PosIdx pos, Value * * args, Value & v) { // we will first count how many values are present for each given key. // we then allocate a single attrset and pre-populate it with lists of @@ -2506,8 +2595,8 @@ static void prim_zipAttrsWith(EvalState & state, const Pos & pos, Value * * args for (auto & attr : *vElem->attrs) attrsSeen[attr.name].first++; } catch (TypeError & e) { - e.addTrace(pos, hintfmt("while invoking '%s'", "zipAttrsWith")); - throw; + e.addTrace(state.positions[pos], hintfmt("while invoking '%s'", "zipAttrsWith")); + state.debugThrowLastTrace(e); } } @@ -2527,7 +2616,7 @@ static void prim_zipAttrsWith(EvalState & state, const Pos & pos, Value * * args for (auto & attr : *v.attrs) { auto name = state.allocValue(); - name->mkString(attr.name); + name->mkString(state.symbols[attr.name]); auto call1 = state.allocValue(); call1->mkApp(args[0], name); auto call2 = state.allocValue(); @@ -2575,7 +2664,7 @@ static RegisterPrimOp primop_zipAttrsWith({ /* Determine whether the argument is a list. 
*/ -static void prim_isList(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_isList(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); v.mkBool(args[0]->type() == nList); @@ -2590,20 +2679,20 @@ static RegisterPrimOp primop_isList({ .fun = prim_isList, }); -static void elemAt(EvalState & state, const Pos & pos, Value & list, int n, Value & v) +static void elemAt(EvalState & state, const PosIdx pos, Value & list, int n, Value & v) { state.forceList(list, pos); if (n < 0 || (unsigned int) n >= list.listSize()) - throw Error({ + state.debugThrowLastTrace(Error({ .msg = hintfmt("list index %1% is out of bounds", n), - .errPos = pos - }); + .errPos = state.positions[pos] + })); state.forceValue(*list.listElems()[n], pos); v = *list.listElems()[n]; } /* Return the n-1'th element of a list. */ -static void prim_elemAt(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_elemAt(EvalState & state, const PosIdx pos, Value * * args, Value & v) { elemAt(state, pos, *args[0], state.forceInt(*args[1], pos), v); } @@ -2619,7 +2708,7 @@ static RegisterPrimOp primop_elemAt({ }); /* Return the first element of a list. */ -static void prim_head(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_head(EvalState & state, const PosIdx pos, Value * * args, Value & v) { elemAt(state, pos, *args[0], 0, v); } @@ -2638,14 +2727,14 @@ static RegisterPrimOp primop_head({ /* Return a list consisting of everything but the first element of a list. Warning: this function takes O(n) time, so you probably don't want to use it! */ -static void prim_tail(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_tail(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceList(*args[0], pos); if (args[0]->listSize() == 0) - throw Error({ + state.debugThrowLastTrace(Error({ .msg = hintfmt("'tail' called on an empty list"), - .errPos = pos - }); + .errPos = state.positions[pos] + })); state.mkList(v, args[0]->listSize() - 1); for (unsigned int n = 0; n < v.listSize(); ++n) @@ -2669,7 +2758,7 @@ static RegisterPrimOp primop_tail({ }); /* Apply a function to every element of a list. */ -static void prim_map(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_map(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceList(*args[1], pos); @@ -2699,7 +2788,7 @@ static RegisterPrimOp primop_map({ /* Filter a list using a predicate; that is, return a list containing every element from the list for which the predicate function returns true. */ -static void prim_filter(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_filter(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceFunction(*args[0], pos); state.forceList(*args[1], pos); @@ -2737,7 +2826,7 @@ static RegisterPrimOp primop_filter({ }); /* Return true if a list contains a given element. */ -static void prim_elem(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_elem(EvalState & state, const PosIdx pos, Value * * args, Value & v) { bool res = false; state.forceList(*args[1], pos); @@ -2760,7 +2849,7 @@ static RegisterPrimOp primop_elem({ }); /* Concatenate a list of lists. 
*/ -static void prim_concatLists(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_concatLists(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceList(*args[0], pos); state.concatLists(v, args[0]->listSize(), args[0]->listElems(), pos); @@ -2776,7 +2865,7 @@ static RegisterPrimOp primop_concatLists({ }); /* Return the length of a list. This is an O(1) time operation. */ -static void prim_length(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_length(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceList(*args[0], pos); v.mkInt(args[0]->listSize()); @@ -2793,7 +2882,7 @@ static RegisterPrimOp primop_length({ /* Reduce a list by applying a binary operator, from left to right. The operator is applied strictly. */ -static void prim_foldlStrict(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_foldlStrict(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceFunction(*args[0], pos); state.forceList(*args[2], pos); @@ -2826,7 +2915,7 @@ static RegisterPrimOp primop_foldlStrict({ .fun = prim_foldlStrict, }); -static void anyOrAll(bool any, EvalState & state, const Pos & pos, Value * * args, Value & v) +static void anyOrAll(bool any, EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceFunction(*args[0], pos); state.forceList(*args[1], pos); @@ -2845,7 +2934,7 @@ static void anyOrAll(bool any, EvalState & state, const Pos & pos, Value * * arg } -static void prim_any(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_any(EvalState & state, const PosIdx pos, Value * * args, Value & v) { anyOrAll(true, state, pos, args, v); } @@ -2860,7 +2949,7 @@ static RegisterPrimOp primop_any({ .fun = prim_any, }); -static void prim_all(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_all(EvalState & state, const PosIdx pos, Value * * args, Value & v) { anyOrAll(false, state, pos, args, v); } @@ -2875,15 +2964,15 @@ static RegisterPrimOp primop_all({ .fun = prim_all, }); -static void prim_genList(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_genList(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto len = state.forceInt(*args[1], pos); if (len < 0) - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("cannot create list of size %1%", len), - .errPos = pos - }); + .errPos = state.positions[pos] + })); state.mkList(v, len); @@ -2910,10 +2999,10 @@ static RegisterPrimOp primop_genList({ .fun = prim_genList, }); -static void prim_lessThan(EvalState & state, const Pos & pos, Value * * args, Value & v); +static void prim_lessThan(EvalState & state, const PosIdx pos, Value * * args, Value & v); -static void prim_sort(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_sort(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceFunction(*args[0], pos); state.forceList(*args[1], pos); @@ -2964,7 +3053,7 @@ static RegisterPrimOp primop_sort({ .fun = prim_sort, }); -static void prim_partition(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_partition(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceFunction(*args[0], pos); state.forceList(*args[1], pos); @@ -3024,7 +3113,7 @@ static RegisterPrimOp primop_partition({ .fun = prim_partition, }); -static void prim_groupBy(EvalState & state, 
const Pos & pos, Value * * args, Value & v) +static void prim_groupBy(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceFunction(*args[0], pos); state.forceList(*args[1], pos); @@ -3035,7 +3124,7 @@ static void prim_groupBy(EvalState & state, const Pos & pos, Value * * args, Val Value res; state.callFunction(*args[0], *vElem, res, pos); auto name = state.forceStringNoCtx(res, pos); - Symbol sym = state.symbols.create(name); + auto sym = state.symbols.create(name); auto vector = attrs.try_emplace(sym, ValueVector()).first; vector->second.push_back(vElem); } @@ -3076,7 +3165,7 @@ static RegisterPrimOp primop_groupBy({ .fun = prim_groupBy, }); -static void prim_concatMap(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_concatMap(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceFunction(*args[0], pos); state.forceList(*args[1], pos); @@ -3091,8 +3180,8 @@ static void prim_concatMap(EvalState & state, const Pos & pos, Value * * args, V try { state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos))); } catch (TypeError &e) { - e.addTrace(pos, hintfmt("while invoking '%s'", "concatMap")); - throw; + e.addTrace(state.positions[pos], hintfmt("while invoking '%s'", "concatMap")); + state.debugThrowLastTrace(e); } len += lists[n].listSize(); } @@ -3123,7 +3212,7 @@ static RegisterPrimOp primop_concatMap({ *************************************************************/ -static void prim_add(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_add(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); state.forceValue(*args[1], pos); @@ -3142,7 +3231,7 @@ static RegisterPrimOp primop_add({ .fun = prim_add, }); -static void prim_sub(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_sub(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); state.forceValue(*args[1], pos); @@ -3161,7 +3250,7 @@ static RegisterPrimOp primop_sub({ .fun = prim_sub, }); -static void prim_mul(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_mul(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); state.forceValue(*args[1], pos); @@ -3180,17 +3269,17 @@ static RegisterPrimOp primop_mul({ .fun = prim_mul, }); -static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); state.forceValue(*args[1], pos); NixFloat f2 = state.forceFloat(*args[1], pos); if (f2 == 0) - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("division by zero"), - .errPos = pos - }); + .errPos = state.positions[pos] + })); if (args[0]->type() == nFloat || args[1]->type() == nFloat) { v.mkFloat(state.forceFloat(*args[0], pos) / state.forceFloat(*args[1], pos)); @@ -3199,10 +3288,10 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value & NixInt i2 = state.forceInt(*args[1], pos); /* Avoid division overflow as it might raise SIGFPE. 
*/ if (i1 == std::numeric_limits<NixInt>::min() && i2 == -1) - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("overflow in integer division"), - .errPos = pos - }); + .errPos = state.positions[pos] + })); v.mkInt(i1 / i2); } @@ -3217,7 +3306,7 @@ static RegisterPrimOp primop_div({ .fun = prim_div, }); -static void prim_bitAnd(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_bitAnd(EvalState & state, const PosIdx pos, Value * * args, Value & v) { v.mkInt(state.forceInt(*args[0], pos) & state.forceInt(*args[1], pos)); } @@ -3231,7 +3320,7 @@ static RegisterPrimOp primop_bitAnd({ .fun = prim_bitAnd, }); -static void prim_bitOr(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_bitOr(EvalState & state, const PosIdx pos, Value * * args, Value & v) { v.mkInt(state.forceInt(*args[0], pos) | state.forceInt(*args[1], pos)); } @@ -3245,7 +3334,7 @@ static RegisterPrimOp primop_bitOr({ .fun = prim_bitOr, }); -static void prim_bitXor(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_bitXor(EvalState & state, const PosIdx pos, Value * * args, Value & v) { v.mkInt(state.forceInt(*args[0], pos) ^ state.forceInt(*args[1], pos)); } @@ -3259,7 +3348,7 @@ static RegisterPrimOp primop_bitXor({ .fun = prim_bitXor, }); -static void prim_lessThan(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_lessThan(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); state.forceValue(*args[1], pos); @@ -3287,7 +3376,7 @@ static RegisterPrimOp primop_lessThan({ /* Convert the argument to a string. Paths are *not* copied to the store, so `toString /foo/bar' yields `"/foo/bar"', not `"/nix/store/whatever..."'. */ -static void prim_toString(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_toString(EvalState & state, const PosIdx pos, Value * * args, Value & v) { PathSet context; auto s = state.coerceToString(pos, *args[0], context, true, false); @@ -3322,7 +3411,7 @@ static RegisterPrimOp primop_toString({ at character position `min(start, stringLength str)' inclusive and ending at `min(start + len, stringLength str)'. `start' must be non-negative. */ -static void prim_substring(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, Value & v) { int start = state.forceInt(*args[0], pos); int len = state.forceInt(*args[1], pos); @@ -3330,10 +3419,10 @@ static void prim_substring(EvalState & state, const Pos & pos, Value * * args, V auto s = state.coerceToString(pos, *args[2], context); if (start < 0) - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("negative start position in 'substring'"), - .errPos = pos - }); + .errPos = state.positions[pos] + })); v.mkString((unsigned int) start >= s->size() ? "" : s->substr(start, len), context); } @@ -3358,7 +3447,7 @@ static RegisterPrimOp primop_substring({ .fun = prim_substring, }); -static void prim_stringLength(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_stringLength(EvalState & state, const PosIdx pos, Value * * args, Value & v) { PathSet context; auto s = state.coerceToString(pos, *args[0], context); @@ -3376,15 +3465,15 @@ static RegisterPrimOp primop_stringLength({ }); /* Return the cryptographic hash of a string in base-16. 
*/ -static void prim_hashString(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto type = state.forceStringNoCtx(*args[0], pos); std::optional<HashType> ht = parseHashType(type); if (!ht) - throw Error({ + state.debugThrowLastTrace(Error({ .msg = hintfmt("unknown hash type '%1%'", type), - .errPos = pos - }); + .errPos = state.positions[pos] + })); PathSet context; // discarded auto s = state.forceString(*args[1], context, pos); @@ -3424,7 +3513,7 @@ std::shared_ptr<RegexCache> makeRegexCache() return std::make_shared<RegexCache>(); } -void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v) +void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto re = state.forceStringNoCtx(*args[0], pos); @@ -3451,19 +3540,18 @@ void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v) (v.listElems()[i] = state.allocValue())->mkString(match[i + 1].str()); } - } catch (std::regex_error &e) { + } catch (std::regex_error & e) { if (e.code() == std::regex_constants::error_space) { // limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++ - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("memory limit exceeded by regular expression '%s'", re), - .errPos = pos - }); - } else { - throw EvalError({ + .errPos = state.positions[pos] + })); + } else + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("invalid regular expression '%s'", re), - .errPos = pos - }); - } + .errPos = state.positions[pos] + })); } } @@ -3498,14 +3586,14 @@ static RegisterPrimOp primop_match({ builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " FOO " ``` - Evaluates to `[ "foo" ]`. + Evaluates to `[ "FOO" ]`. )s", .fun = prim_match, }); /* Split a string with a regular expression, and return a list of the non-matching parts interleaved by the lists of the matching groups. */ -void prim_split(EvalState & state, const Pos & pos, Value * * args, Value & v) +void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto re = state.forceStringNoCtx(*args[0], pos); @@ -3556,19 +3644,18 @@ void prim_split(EvalState & state, const Pos & pos, Value * * args, Value & v) assert(idx == 2 * len + 1); - } catch (std::regex_error &e) { + } catch (std::regex_error & e) { if (e.code() == std::regex_constants::error_space) { // limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++ - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("memory limit exceeded by regular expression '%s'", re), - .errPos = pos - }); - } else { - throw EvalError({ + .errPos = state.positions[pos] + })); + } else + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("invalid regular expression '%s'", re), - .errPos = pos - }); - } + .errPos = state.positions[pos] + })); } } @@ -3601,7 +3688,7 @@ static RegisterPrimOp primop_split({ Evaluates to `[ "" [ "a" null ] "b" [ null "c" ] "" ]`. ```nix - builtins.split "([[:upper:]]+)" " FOO " + builtins.split "([[:upper:]]+)" " FOO " ``` Evaluates to `[ " " [ "FOO" ] " " ]`. 
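The `builtins.match` example above is corrected from `[ "foo" ]` to `[ "FOO" ]` because `match` returns capture groups exactly as they appear in the matched string, and it returns `null` when the whole string does not match. A quick illustration of both cases:

```nix
builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " FOO "   # [ "FOO" ]
builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " foo "   # null (the group only matches upper-case letters)
```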
@@ -3609,7 +3696,7 @@ static RegisterPrimOp primop_split({ .fun = prim_split, }); -static void prim_concatStringsSep(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_concatStringsSep(EvalState & state, const PosIdx pos, Value * * args, Value & v) { PathSet context; @@ -3639,15 +3726,15 @@ static RegisterPrimOp primop_concatStringsSep({ .fun = prim_concatStringsSep, }); -static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * args, Value & v) { state.forceList(*args[0], pos); state.forceList(*args[1], pos); if (args[0]->listSize() != args[1]->listSize()) - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("'from' and 'to' arguments to 'replaceStrings' have different lengths"), - .errPos = pos - }); + .errPos = state.positions[pos] + })); std::vector<std::string> from; from.reserve(args[0]->listSize()); @@ -3719,7 +3806,7 @@ static RegisterPrimOp primop_replaceStrings({ *************************************************************/ -static void prim_parseDrvName(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_parseDrvName(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto name = state.forceStringNoCtx(*args[0], pos); DrvName parsed(name); @@ -3734,8 +3821,8 @@ static RegisterPrimOp primop_parseDrvName({ .args = {"s"}, .doc = R"( Split the string *s* into a package name and version. The package - name is everything up to but not including the first dash followed - by a digit, and the version is everything following that dash. The + name is everything up to but not including the first dash not followed + by a letter, and the version is everything following that dash. The result is returned in a set `{ name, version }`. Thus, `builtins.parseDrvName "nix-0.12pre12876"` returns `{ name = "nix"; version = "0.12pre12876"; }`. @@ -3743,7 +3830,7 @@ static RegisterPrimOp primop_parseDrvName({ .fun = prim_parseDrvName, }); -static void prim_compareVersions(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_compareVersions(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto version1 = state.forceStringNoCtx(*args[0], pos); auto version2 = state.forceStringNoCtx(*args[1], pos); @@ -3763,7 +3850,7 @@ static RegisterPrimOp primop_compareVersions({ .fun = prim_compareVersions, }); -static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_splitVersion(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto version = state.forceStringNoCtx(*args[0], pos); auto iter = version.cbegin(); @@ -3806,7 +3893,7 @@ RegisterPrimOp::RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun) .name = name, .args = {}, .arity = arity, - .fun = fun + .fun = fun, }); } @@ -3865,6 +3952,18 @@ void EvalState::createBaseEnv() addPrimOp("__exec", 1, prim_exec); } + addPrimOp({ + .fun = evalSettings.traceVerbose ? prim_trace : prim_second, + .arity = 2, + .name = "__traceVerbose", + .args = { "e1", "e2" }, + .doc = R"( + Evaluate *e1* and print its abstract syntax representation on standard + error if `--trace-verbose` is enabled. Then return *e2*. This function + is useful for debugging. + )", + }); + /* Add a value containing the current Nix expression search path. 
*/ mkList(v, searchPath.size()); int n = 0; @@ -3878,17 +3977,21 @@ void EvalState::createBaseEnv() if (RegisterPrimOp::primOps) for (auto & primOp : *RegisterPrimOp::primOps) - addPrimOp({ - .fun = primOp.fun, - .arity = std::max(primOp.args.size(), primOp.arity), - .name = symbols.create(primOp.name), - .args = primOp.args, - .doc = primOp.doc, - }); + if (!primOp.experimentalFeature + || settings.isExperimentalFeatureEnabled(*primOp.experimentalFeature)) + { + addPrimOp({ + .fun = primOp.fun, + .arity = std::max(primOp.args.size(), primOp.arity), + .name = primOp.name, + .args = primOp.args, + .doc = primOp.doc, + }); + } /* Add a wrapper around the derivation primop that computes the `drvPath' and `outPath' attributes lazily. */ - sDerivationNix = symbols.create("//builtin/derivation.nix"); + sDerivationNix = symbols.create(derivationNixPath); auto vDerivation = allocValue(); addConstant("derivation", vDerivation); @@ -3896,7 +3999,7 @@ void EvalState::createBaseEnv() because attribute lookups expect it to be sorted. */ baseEnv.values[0]->attrs->sort(); - staticBaseEnv.sort(); + staticBaseEnv->sort(); /* Note: we have to initialize the 'derivation' constant *after* building baseEnv/staticBaseEnv because it uses 'builtins'. */ @@ -3905,7 +4008,7 @@ void EvalState::createBaseEnv() // the parser needs two NUL bytes as terminators; one of them // is implied by being a C string. "\0"; - eval(parse(code, sizeof(code), foFile, sDerivationNix, "/", staticBaseEnv), *vDerivation); + eval(parse(code, sizeof(code), foFile, derivationNixPath, "/", staticBaseEnv), *vDerivation); } diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh index 5b16e075f..1cfb4356b 100644 --- a/src/libexpr/primops.hh +++ b/src/libexpr/primops.hh @@ -16,6 +16,7 @@ struct RegisterPrimOp size_t arity = 0; const char * doc; PrimOpFun fun; + std::optional<ExperimentalFeature> experimentalFeature; }; typedef std::vector<Info> PrimOps; @@ -35,10 +36,11 @@ struct RegisterPrimOp /* These primops are disabled without enableNativeCode, but plugins may wish to use them in limited contexts without globally enabling them. 
*/ + /* Load a ValueInitializer from a DSO and return whatever it initializes */ -void prim_importNative(EvalState & state, const Pos & pos, Value * * args, Value & v); +void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Value & v); /* Execute a program and parse its output */ -void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v); +void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v); } diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc index 3701bd442..979136984 100644 --- a/src/libexpr/primops/context.cc +++ b/src/libexpr/primops/context.cc @@ -1,10 +1,11 @@ #include "primops.hh" #include "eval-inline.hh" +#include "derivations.hh" #include "store-api.hh" namespace nix { -static void prim_unsafeDiscardStringContext(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_unsafeDiscardStringContext(EvalState & state, const PosIdx pos, Value * * args, Value & v) { PathSet context; auto s = state.coerceToString(pos, *args[0], context); @@ -14,7 +15,7 @@ static void prim_unsafeDiscardStringContext(EvalState & state, const Pos & pos, static RegisterPrimOp primop_unsafeDiscardStringContext("__unsafeDiscardStringContext", 1, prim_unsafeDiscardStringContext); -static void prim_hasContext(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_hasContext(EvalState & state, const PosIdx pos, Value * * args, Value & v) { PathSet context; state.forceString(*args[0], context, pos); @@ -30,7 +31,7 @@ static RegisterPrimOp primop_hasContext("__hasContext", 1, prim_hasContext); source-only deployment). This primop marks the string context so that builtins.derivation adds the path to drv.inputSrcs rather than drv.inputDrvs. */ -static void prim_unsafeDiscardOutputDependency(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx pos, Value * * args, Value & v) { PathSet context; auto s = state.coerceToString(pos, *args[0], context); @@ -64,7 +65,7 @@ static RegisterPrimOp primop_unsafeDiscardOutputDependency("__unsafeDiscardOutpu Note that for a given path any combination of the above attributes may be present. */ -static void prim_getContext(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args, Value & v) { struct ContextInfo { bool path = false; @@ -82,8 +83,8 @@ static void prim_getContext(EvalState & state, const Pos & pos, Value * * args, drv = std::string(p, 1); path = &drv; } else if (p.at(0) == '!') { - std::pair<std::string, std::string> ctx = decodeContext(p); - drv = ctx.first; + NixStringContextElem ctx = decodeContext(*state.store, p); + drv = state.store->printStorePath(ctx.first); output = ctx.second; path = &drv; } @@ -133,7 +134,7 @@ static RegisterPrimOp primop_getContext("__getContext", 1, prim_getContext); See the commentary above unsafeGetContext for details of the context representation. 
*/ -static void prim_appendContext(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * args, Value & v) { PathSet context; auto orig = state.forceString(*args[0], context, pos); @@ -143,45 +144,46 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg auto sPath = state.symbols.create("path"); auto sAllOutputs = state.symbols.create("allOutputs"); for (auto & i : *args[1]->attrs) { - if (!state.store->isStorePath(i.name)) + const auto & name = state.symbols[i.name]; + if (!state.store->isStorePath(name)) throw EvalError({ - .msg = hintfmt("Context key '%s' is not a store path", i.name), - .errPos = *i.pos + .msg = hintfmt("Context key '%s' is not a store path", name), + .errPos = state.positions[i.pos] }); if (!settings.readOnlyMode) - state.store->ensurePath(state.store->parseStorePath(i.name)); - state.forceAttrs(*i.value, *i.pos); + state.store->ensurePath(state.store->parseStorePath(name)); + state.forceAttrs(*i.value, i.pos); auto iter = i.value->attrs->find(sPath); if (iter != i.value->attrs->end()) { - if (state.forceBool(*iter->value, *iter->pos)) - context.insert(i.name); + if (state.forceBool(*iter->value, iter->pos)) + context.emplace(name); } iter = i.value->attrs->find(sAllOutputs); if (iter != i.value->attrs->end()) { - if (state.forceBool(*iter->value, *iter->pos)) { - if (!isDerivation(i.name)) { + if (state.forceBool(*iter->value, iter->pos)) { + if (!isDerivation(name)) { throw EvalError({ - .msg = hintfmt("Tried to add all-outputs context of %s, which is not a derivation, to a string", i.name), - .errPos = *i.pos + .msg = hintfmt("Tried to add all-outputs context of %s, which is not a derivation, to a string", name), + .errPos = state.positions[i.pos] }); } - context.insert("=" + std::string(i.name)); + context.insert(concatStrings("=", name)); } } iter = i.value->attrs->find(state.sOutputs); if (iter != i.value->attrs->end()) { - state.forceList(*iter->value, *iter->pos); - if (iter->value->listSize() && !isDerivation(i.name)) { + state.forceList(*iter->value, iter->pos); + if (iter->value->listSize() && !isDerivation(name)) { throw EvalError({ - .msg = hintfmt("Tried to add derivation output context of %s, which is not a derivation, to a string", i.name), - .errPos = *i.pos + .msg = hintfmt("Tried to add derivation output context of %s, which is not a derivation, to a string", name), + .errPos = state.positions[i.pos] }); } for (auto elem : iter->value->listItems()) { - auto name = state.forceStringNoCtx(*elem, *iter->pos); - context.insert(concatStrings("!", name, "!", i.name)); + auto outputName = state.forceStringNoCtx(*elem, iter->pos); + context.insert(concatStrings("!", outputName, "!", name)); } } } diff --git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc new file mode 100644 index 000000000..662c9652e --- /dev/null +++ b/src/libexpr/primops/fetchClosure.cc @@ -0,0 +1,163 @@ +#include "primops.hh" +#include "store-api.hh" +#include "make-content-addressed.hh" +#include "url.hh" + +namespace nix { + +static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * args, Value & v) +{ + state.forceAttrs(*args[0], pos); + + std::optional<std::string> fromStoreUrl; + std::optional<StorePath> fromPath; + bool toCA = false; + std::optional<StorePath> toPath; + + for (auto & attr : *args[0]->attrs) { + const auto & attrName = state.symbols[attr.name]; + + if (attrName == "fromPath") { + PathSet context; + 
fromPath = state.coerceToStorePath(attr.pos, *attr.value, context); + } + + else if (attrName == "toPath") { + state.forceValue(*attr.value, attr.pos); + toCA = true; + if (attr.value->type() != nString || attr.value->string.s != std::string("")) { + PathSet context; + toPath = state.coerceToStorePath(attr.pos, *attr.value, context); + } + } + + else if (attrName == "fromStore") + fromStoreUrl = state.forceStringNoCtx(*attr.value, attr.pos); + + else + throw Error({ + .msg = hintfmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName), + .errPos = state.positions[pos] + }); + } + + if (!fromPath) + throw Error({ + .msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"), + .errPos = state.positions[pos] + }); + + if (!fromStoreUrl) + throw Error({ + .msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"), + .errPos = state.positions[pos] + }); + + auto parsedURL = parseURL(*fromStoreUrl); + + if (parsedURL.scheme != "http" && + parsedURL.scheme != "https" && + !(getEnv("_NIX_IN_TEST").has_value() && parsedURL.scheme == "file")) + throw Error({ + .msg = hintfmt("'fetchClosure' only supports http:// and https:// stores"), + .errPos = state.positions[pos] + }); + + if (!parsedURL.query.empty()) + throw Error({ + .msg = hintfmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl), + .errPos = state.positions[pos] + }); + + auto fromStore = openStore(parsedURL.to_string()); + + if (toCA) { + if (!toPath || !state.store->isValidPath(*toPath)) { + auto remappings = makeContentAddressed(*fromStore, *state.store, { *fromPath }); + auto i = remappings.find(*fromPath); + assert(i != remappings.end()); + if (toPath && *toPath != i->second) + throw Error({ + .msg = hintfmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected", + state.store->printStorePath(*fromPath), + state.store->printStorePath(i->second), + state.store->printStorePath(*toPath)), + .errPos = state.positions[pos] + }); + if (!toPath) + throw Error({ + .msg = hintfmt( + "rewriting '%s' to content-addressed form yielded '%s'; " + "please set this in the 'toPath' attribute passed to 'fetchClosure'", + state.store->printStorePath(*fromPath), + state.store->printStorePath(i->second)), + .errPos = state.positions[pos] + }); + } + } else { + if (!state.store->isValidPath(*fromPath)) + copyClosure(*fromStore, *state.store, RealisedPath::Set { *fromPath }); + toPath = fromPath; + } + + /* In pure mode, require a CA path. */ + if (evalSettings.pureEval) { + auto info = state.store->queryPathInfo(*toPath); + if (!info->isContentAddressed(*state.store)) + throw Error({ + .msg = hintfmt("in pure mode, 'fetchClosure' requires a content-addressed path, which '%s' isn't", + state.store->printStorePath(*toPath)), + .errPos = state.positions[pos] + }); + } + + auto toPathS = state.store->printStorePath(*toPath); + v.mkString(toPathS, {toPathS}); +} + +static RegisterPrimOp primop_fetchClosure({ + .name = "__fetchClosure", + .args = {"args"}, + .doc = R"( + Fetch a Nix store closure from a binary cache, rewriting it into + content-addressed form. For example, + + ```nix + builtins.fetchClosure { + fromStore = "https://cache.nixos.org"; + fromPath = /nix/store/r2jd6ygnmirm2g803mksqqjm4y39yi6i-git-2.33.1; + toPath = /nix/store/ldbhlwhh39wha58rm61bkiiwm6j7211j-git-2.33.1; + } + ``` + + fetches `/nix/store/r2jd...` from the specified binary cache, + and rewrites it into the content-addressed store path + `/nix/store/ldbh...`. 
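    The result is an ordinary store-path string (with that path in its string context), so it can be used wherever a store path is expected. A minimal sketch reusing the paths from the example above:

    ```nix
    let
      git = builtins.fetchClosure {
        fromStore = "https://cache.nixos.org";
        fromPath = /nix/store/r2jd6ygnmirm2g803mksqqjm4y39yi6i-git-2.33.1;
        toPath = /nix/store/ldbhlwhh39wha58rm61bkiiwm6j7211j-git-2.33.1;
      };
    in "${git}/bin/git"   # "/nix/store/ldbh...-git-2.33.1/bin/git"
    ```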
+ + If `fromPath` is already content-addressed, or if you are + allowing impure evaluation (`--impure`), then `toPath` may be + omitted. + + To find out the correct value for `toPath` given a `fromPath`, + you can use `nix store make-content-addressed`: + + ```console + # nix store make-content-addressed --from https://cache.nixos.org /nix/store/r2jd6ygnmirm2g803mksqqjm4y39yi6i-git-2.33.1 + rewrote '/nix/store/r2jd6ygnmirm2g803mksqqjm4y39yi6i-git-2.33.1' to '/nix/store/ldbhlwhh39wha58rm61bkiiwm6j7211j-git-2.33.1' + ``` + + This function is similar to `builtins.storePath` in that it + allows you to use a previously built store path in a Nix + expression. However, it is more reproducible because it requires + specifying a binary cache from which the path can be fetched. + Also, requiring a content-addressed final store path avoids the + need for users to configure binary cache public keys. + + This function is only available if you enable the experimental + feature `fetch-closure`. + )", + .fun = prim_fetchClosure, + .experimentalFeature = Xp::FetchClosure, +}); + +} diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index b7f715859..249c0934e 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -7,7 +7,7 @@ namespace nix { -static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * args, Value & v) { std::string url; std::optional<Hash> rev; @@ -22,31 +22,31 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar state.forceAttrs(*args[0], pos); for (auto & attr : *args[0]->attrs) { - std::string_view n(attr.name); + std::string_view n(state.symbols[attr.name]); if (n == "url") - url = state.coerceToString(*attr.pos, *attr.value, context, false, false).toOwned(); + url = state.coerceToString(attr.pos, *attr.value, context, false, false).toOwned(); else if (n == "rev") { // Ugly: unlike fetchGit, here the "rev" attribute can // be both a revision or a branch/tag name. 
- auto value = state.forceStringNoCtx(*attr.value, *attr.pos); + auto value = state.forceStringNoCtx(*attr.value, attr.pos); if (std::regex_match(value.begin(), value.end(), revRegex)) rev = Hash::parseAny(value, htSHA1); else ref = value; } else if (n == "name") - name = state.forceStringNoCtx(*attr.value, *attr.pos); + name = state.forceStringNoCtx(*attr.value, attr.pos); else throw EvalError({ - .msg = hintfmt("unsupported argument '%s' to 'fetchMercurial'", attr.name), - .errPos = *attr.pos + .msg = hintfmt("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]), + .errPos = state.positions[attr.pos] }); } if (url.empty()) throw EvalError({ .msg = hintfmt("'url' argument required"), - .errPos = pos + .errPos = state.positions[pos] }); } else diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index f3e3e70d8..84e7f5c02 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -90,7 +90,7 @@ struct FetchTreeParams { static void fetchTree( EvalState & state, - const Pos & pos, + const PosIdx pos, Value * * args, Value & v, std::optional<std::string> type, @@ -108,46 +108,46 @@ static void fetchTree( if (auto aType = args[0]->attrs->get(state.sType)) { if (type) - throw Error({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("unexpected attribute 'type'"), - .errPos = pos - }); - type = state.forceStringNoCtx(*aType->value, *aType->pos); + .errPos = state.positions[pos] + })); + type = state.forceStringNoCtx(*aType->value, aType->pos); } else if (!type) - throw Error({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"), - .errPos = pos - }); + .errPos = state.positions[pos] + })); attrs.emplace("type", type.value()); for (auto & attr : *args[0]->attrs) { if (attr.name == state.sType) continue; - state.forceValue(*attr.value, *attr.pos); + state.forceValue(*attr.value, attr.pos); if (attr.value->type() == nPath || attr.value->type() == nString) { - auto s = state.coerceToString(*attr.pos, *attr.value, context, false, false).toOwned(); - attrs.emplace(attr.name, - attr.name == "url" + auto s = state.coerceToString(attr.pos, *attr.value, context, false, false).toOwned(); + attrs.emplace(state.symbols[attr.name], + state.symbols[attr.name] == "url" ? type == "git" ? 
fixURIForGit(s, state) : fixURI(s, state) : s); } else if (attr.value->type() == nBool) - attrs.emplace(attr.name, Explicit<bool>{attr.value->boolean}); + attrs.emplace(state.symbols[attr.name], Explicit<bool>{attr.value->boolean}); else if (attr.value->type() == nInt) - attrs.emplace(attr.name, uint64_t(attr.value->integer)); + attrs.emplace(state.symbols[attr.name], uint64_t(attr.value->integer)); else - throw TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected", - attr.name, showType(*attr.value)); + state.debugThrowLastTrace(TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected", + state.symbols[attr.name], showType(*attr.value))); } if (!params.allowNameArgument) if (auto nameIter = attrs.find("name"); nameIter != attrs.end()) - throw Error({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("attribute 'name' isn’t supported in call to 'fetchTree'"), - .errPos = pos - }); + .errPos = state.positions[pos] + })); input = fetchers::Input::fromAttrs(std::move(attrs)); } else { @@ -167,7 +167,7 @@ static void fetchTree( input = lookupInRegistries(state.store, input).first; if (evalSettings.pureEval && !input.isLocked()) - throw Error("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", pos); + state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos])); auto [tree, input2] = input.fetch(state.store); @@ -176,7 +176,7 @@ static void fetchTree( emitTreeAttrs(state, tree, input2, v, params.emptyRevFallback, false); } -static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_fetchTree(EvalState & state, const PosIdx pos, Value * * args, Value & v) { settings.requireExperimentalFeature(Xp::Flakes); fetchTree(state, pos, args, v, std::nullopt, FetchTreeParams { .allowNameArgument = false }); @@ -185,7 +185,7 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, V // FIXME: document static RegisterPrimOp primop_fetchTree("fetchTree", 1, prim_fetchTree); -static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v, +static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v, const std::string & who, bool unpack, std::string name) { std::optional<std::string> url; @@ -198,25 +198,25 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v, state.forceAttrs(*args[0], pos); for (auto & attr : *args[0]->attrs) { - std::string n(attr.name); + std::string_view n(state.symbols[attr.name]); if (n == "url") - url = state.forceStringNoCtx(*attr.value, *attr.pos); + url = state.forceStringNoCtx(*attr.value, attr.pos); else if (n == "sha256") - expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256); + expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos), htSHA256); else if (n == "name") - name = state.forceStringNoCtx(*attr.value, *attr.pos); + name = state.forceStringNoCtx(*attr.value, attr.pos); else - throw EvalError({ - .msg = hintfmt("unsupported argument '%s' to '%s'", attr.name, who), - .errPos = *attr.pos - }); - } + state.debugThrowLastTrace(EvalError({ + .msg = hintfmt("unsupported argument '%s' to '%s'", n, who), + .errPos = state.positions[attr.pos] + })); + } if (!url) - throw EvalError({ + state.debugThrowLastTrace(EvalError({ .msg = hintfmt("'url' argument required"), - .errPos = pos - }); + .errPos = state.positions[pos] + })); 
} else url = state.forceStringNoCtx(*args[0], pos); @@ -228,8 +228,23 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v, name = baseNameOf(*url); if (evalSettings.pureEval && !expectedHash) - throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who); + state.debugThrowLastTrace(EvalError("in pure evaluation mode, '%s' requires a 'sha256' argument", who)); + + // early exit if pinned and already in the store + if (expectedHash && expectedHash->type == htSHA256) { + auto expectedPath = + unpack + ? state.store->makeFixedOutputPath(FileIngestionMethod::Recursive, *expectedHash, name, {}) + : state.store->makeFixedOutputPath(FileIngestionMethod::Flat, *expectedHash, name, {}); + + if (state.store->isValidPath(expectedPath)) { + state.allowAndSetStorePathString(expectedPath, v); + return; + } + } + // TODO: fetching may fail, yet the path may be substitutable. + // https://github.com/NixOS/nix/issues/4313 auto storePath = unpack ? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).first.storePath @@ -240,17 +255,14 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v, ? state.store->queryPathInfo(storePath)->narHash : hashFile(htSHA256, state.store->toRealPath(storePath)); if (hash != *expectedHash) - throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s", - *url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true)); + state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s", + *url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true))); } - state.allowPath(storePath); - - auto path = state.store->printStorePath(storePath); - v.mkString(path, PathSet({path})); + state.allowAndSetStorePathString(storePath, v); } -static void prim_fetchurl(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_fetchurl(EvalState & state, const PosIdx pos, Value * * args, Value & v) { fetch(state, pos, args, v, "fetchurl", false, ""); } @@ -266,7 +278,7 @@ static RegisterPrimOp primop_fetchurl({ .fun = prim_fetchurl, }); -static void prim_fetchTarball(EvalState & state, const Pos & pos, Value * * args, Value & v) +static void prim_fetchTarball(EvalState & state, const PosIdx pos, Value * * args, Value & v) { fetch(state, pos, args, v, "fetchTarball", true, "source"); } @@ -317,7 +329,7 @@ static RegisterPrimOp primop_fetchTarball({ .fun = prim_fetchTarball, }); -static void prim_fetchGit(EvalState &state, const Pos &pos, Value **args, Value &v) +static void prim_fetchGit(EvalState & state, const PosIdx pos, Value * * args, Value & v) { fetchTree(state, pos, args, v, "git", FetchTreeParams { .emptyRevFallback = true, .allowNameArgument = true }); } @@ -352,6 +364,10 @@ static RegisterPrimOp primop_fetchGit({ A Boolean parameter that specifies whether submodules should be checked out. Defaults to `false`. + - shallow\ + A Boolean parameter that specifies whether fetching a shallow clone + is allowed. Defaults to `false`. + - allRefs\ Whether to fetch all refs of the repository. 
With this argument being true, it's possible to load a `rev` from *any* `ref` (by default only diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index dd4280030..9753e2ac9 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -5,7 +5,7 @@ namespace nix { -static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Value & val) +static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, Value & val) { auto toml = state.forceStringNoCtx(*args[0], pos); @@ -73,7 +73,7 @@ static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Va } catch (std::exception & e) { // TODO: toml::syntax_error throw EvalError({ .msg = hintfmt("while parsing a TOML string: %s", e.what()), - .errPos = pos + .errPos = state.positions[pos] }); } } diff --git a/src/libexpr/symbol-table.hh b/src/libexpr/symbol-table.hh index 48d20c29d..288c15602 100644 --- a/src/libexpr/symbol-table.hh +++ b/src/libexpr/symbol-table.hh @@ -5,44 +5,32 @@ #include <unordered_map> #include "types.hh" +#include "chunked-vector.hh" namespace nix { /* Symbol table used by the parser and evaluator to represent and look up identifiers and attributes efficiently. SymbolTable::create() converts a string into a symbol. Symbols have the property that - they can be compared efficiently (using a pointer equality test), + they can be compared efficiently (using an equality test), because the symbol table stores only one copy of each string. */ -class Symbol +/* This class mainly exists to give us an operator<< for ostreams. We could also + return plain strings from SymbolTable, but then we'd have to wrap every + instance of a symbol that is fmt()ed, which is inconvenient and error-prone. */ +class SymbolStr { -private: - const std::string * s; // pointer into SymbolTable - Symbol(const std::string * s) : s(s) { }; friend class SymbolTable; -public: - Symbol() : s(0) { }; +private: + const std::string * s; - bool operator == (const Symbol & s2) const - { - return s == s2.s; - } + explicit SymbolStr(const std::string & symbol): s(&symbol) {} - // FIXME: remove +public: bool operator == (std::string_view s2) const { - return s->compare(s2) == 0; - } - - bool operator != (const Symbol & s2) const - { - return s != s2.s; - } - - bool operator < (const Symbol & s2) const - { - return s < s2.s; + return *s == s2; } operator const std::string & () const @@ -55,51 +43,78 @@ public: return *s; } - bool set() const - { - return s; - } + friend std::ostream & operator <<(std::ostream & os, const SymbolStr & symbol); +}; - bool empty() const - { - return s->empty(); - } +class Symbol +{ + friend class SymbolTable; + +private: + uint32_t id; + + explicit Symbol(uint32_t id): id(id) {} + +public: + Symbol() : id(0) {} - friend std::ostream & operator << (std::ostream & str, const Symbol & sym); + explicit operator bool() const { return id > 0; } + + bool operator<(const Symbol other) const { return id < other.id; } + bool operator==(const Symbol other) const { return id == other.id; } + bool operator!=(const Symbol other) const { return id != other.id; } }; class SymbolTable { private: - std::unordered_map<std::string_view, Symbol> symbols; - std::list<std::string> store; + std::unordered_map<std::string_view, std::pair<const std::string *, uint32_t>> symbols; + ChunkedVector<std::string, 8192> store{16}; public: + Symbol create(std::string_view s) { // Most symbols are looked up more than once, so we trade off insertion performance // for lookup 
performance. // TODO: could probably be done more efficiently with transparent Hash and Equals // on the original implementation using unordered_set + // FIXME: make this thread-safe. auto it = symbols.find(s); - if (it != symbols.end()) return it->second; + if (it != symbols.end()) return Symbol(it->second.second + 1); + + const auto & [rawSym, idx] = store.add(std::string(s)); + symbols.emplace(rawSym, std::make_pair(&rawSym, idx)); + return Symbol(idx + 1); + } - auto & rawSym = store.emplace_back(s); - return symbols.emplace(rawSym, Symbol(&rawSym)).first->second; + std::vector<SymbolStr> resolve(const std::vector<Symbol> & symbols) const + { + std::vector<SymbolStr> result; + result.reserve(symbols.size()); + for (auto sym : symbols) + result.push_back((*this)[sym]); + return result; + } + + SymbolStr operator[](Symbol s) const + { + if (s.id == 0 || s.id > store.size()) + abort(); + return SymbolStr(store[s.id - 1]); } size_t size() const { - return symbols.size(); + return store.size(); } size_t totalSize() const; template<typename T> - void dump(T callback) + void dump(T callback) const { - for (auto & s : store) - callback(s); + store.forEach(callback); } }; diff --git a/src/libexpr/tests/json.cc b/src/libexpr/tests/json.cc new file mode 100644 index 000000000..f1ea1b197 --- /dev/null +++ b/src/libexpr/tests/json.cc @@ -0,0 +1,68 @@ +#include "libexprtests.hh" +#include "value-to-json.hh" + +namespace nix { +// Testing the conversion to JSON + + class JSONValueTest : public LibExprTest { + protected: + std::string getJSONValue(Value& value) { + std::stringstream ss; + PathSet ps; + printValueAsJSON(state, true, value, noPos, ss, ps); + return ss.str(); + } + }; + + TEST_F(JSONValueTest, null) { + Value v; + v.mkNull(); + ASSERT_EQ(getJSONValue(v), "null"); + } + + TEST_F(JSONValueTest, BoolFalse) { + Value v; + v.mkBool(false); + ASSERT_EQ(getJSONValue(v),"false"); + } + + TEST_F(JSONValueTest, BoolTrue) { + Value v; + v.mkBool(true); + ASSERT_EQ(getJSONValue(v), "true"); + } + + TEST_F(JSONValueTest, IntPositive) { + Value v; + v.mkInt(100); + ASSERT_EQ(getJSONValue(v), "100"); + } + + TEST_F(JSONValueTest, IntNegative) { + Value v; + v.mkInt(-100); + ASSERT_EQ(getJSONValue(v), "-100"); + } + + TEST_F(JSONValueTest, String) { + Value v; + v.mkString("test"); + ASSERT_EQ(getJSONValue(v), "\"test\""); + } + + TEST_F(JSONValueTest, StringQuotes) { + Value v; + + v.mkString("test\""); + ASSERT_EQ(getJSONValue(v), "\"test\\\"\""); + } + + // The dummy store doesn't support writing files. Fails with this exception message: + // C++ exception with description "error: operation 'addToStoreFromDump' is + // not supported by store 'dummy'" thrown in the test body. 
+ TEST_F(JSONValueTest, DISABLED_Path) { + Value v; + v.mkPath("test"); + ASSERT_EQ(getJSONValue(v), "\"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x\""); + } +} /* namespace nix */ diff --git a/src/libexpr/tests/libexprtests.hh b/src/libexpr/tests/libexprtests.hh new file mode 100644 index 000000000..4f6915882 --- /dev/null +++ b/src/libexpr/tests/libexprtests.hh @@ -0,0 +1,136 @@ +#include <gtest/gtest.h> +#include <gmock/gmock.h> + +#include "value.hh" +#include "nixexpr.hh" +#include "eval.hh" +#include "eval-inline.hh" +#include "store-api.hh" + + +namespace nix { + class LibExprTest : public ::testing::Test { + public: + static void SetUpTestSuite() { + initGC(); + } + + protected: + LibExprTest() + : store(openStore("dummy://")) + , state({}, store) + { + } + Value eval(std::string input, bool forceValue = true) { + Value v; + Expr * e = state.parseExprFromString(input, ""); + assert(e); + state.eval(e, v); + if (forceValue) + state.forceValue(v, noPos); + return v; + } + + Symbol createSymbol(const char * value) { + return state.symbols.create(value); + } + + ref<Store> store; + EvalState state; + }; + + MATCHER(IsListType, "") { + return arg != nList; + } + + MATCHER(IsList, "") { + return arg.type() == nList; + } + + MATCHER(IsString, "") { + return arg.type() == nString; + } + + MATCHER(IsNull, "") { + return arg.type() == nNull; + } + + MATCHER(IsThunk, "") { + return arg.type() == nThunk; + } + + MATCHER(IsAttrs, "") { + return arg.type() == nAttrs; + } + + MATCHER_P(IsStringEq, s, fmt("The string is equal to \"%1%\"", s)) { + if (arg.type() != nString) { + return false; + } + return std::string_view(arg.string.s) == s; + } + + MATCHER_P(IsIntEq, v, fmt("The string is equal to \"%1%\"", v)) { + if (arg.type() != nInt) { + return false; + } + return arg.integer == v; + } + + MATCHER_P(IsFloatEq, v, fmt("The float is equal to \"%1%\"", v)) { + if (arg.type() != nFloat) { + return false; + } + return arg.fpoint == v; + } + + MATCHER(IsTrue, "") { + if (arg.type() != nBool) { + return false; + } + return arg.boolean == true; + } + + MATCHER(IsFalse, "") { + if (arg.type() != nBool) { + return false; + } + return arg.boolean == false; + } + + MATCHER_P(IsPathEq, p, fmt("Is a path equal to \"%1%\"", p)) { + if (arg.type() != nPath) { + *result_listener << "Expected a path got " << arg.type(); + return false; + } else if (std::string_view(arg.string.s) != p) { + *result_listener << "Expected a path that equals \"" << p << "\" but got: " << arg.string.s; + return false; + } + return true; + } + + + MATCHER_P(IsListOfSize, n, fmt("Is a list of size [%1%]", n)) { + if (arg.type() != nList) { + *result_listener << "Expected list got " << arg.type(); + return false; + } else if (arg.listSize() != (size_t)n) { + *result_listener << "Expected as list of size " << n << " got " << arg.listSize(); + return false; + } + return true; + } + + MATCHER_P(IsAttrsOfSize, n, fmt("Is a set of size [%1%]", n)) { + if (arg.type() != nAttrs) { + *result_listener << "Expexted set got " << arg.type(); + return false; + } else if (arg.attrs->size() != (size_t)n) { + *result_listener << "Expected a set with " << n << " attributes but got " << arg.attrs->size(); + return false; + } + return true; + } + + +} /* namespace nix */ diff --git a/src/libexpr/tests/local.mk b/src/libexpr/tests/local.mk new file mode 100644 index 000000000..b95980cab --- /dev/null +++ b/src/libexpr/tests/local.mk @@ -0,0 +1,15 @@ +check: libexpr-tests_RUN + +programs += libexpr-tests + +libexpr-tests_DIR := $(d) + 
+libexpr-tests_INSTALL_DIR := + +libexpr-tests_SOURCES := $(wildcard $(d)/*.cc) + +libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests + +libexpr-tests_LIBS = libexpr libutil libstore libfetchers + +libexpr-tests_LDFLAGS := $(GTEST_LIBS) -lgmock diff --git a/src/libexpr/tests/primops.cc b/src/libexpr/tests/primops.cc new file mode 100644 index 000000000..16cf66d2c --- /dev/null +++ b/src/libexpr/tests/primops.cc @@ -0,0 +1,839 @@ +#include <gmock/gmock.h> +#include <gtest/gtest.h> + +#include "libexprtests.hh" + +namespace nix { + class CaptureLogger : public Logger + { + std::ostringstream oss; + + public: + CaptureLogger() {} + + std::string get() const { + return oss.str(); + } + + void log(Verbosity lvl, const FormatOrString & fs) override { + oss << fs.s << std::endl; + } + + void logEI(const ErrorInfo & ei) override { + showErrorInfo(oss, ei, loggerSettings.showTrace.get()); + } + }; + + class CaptureLogging { + Logger * oldLogger; + std::unique_ptr<CaptureLogger> tempLogger; + public: + CaptureLogging() : tempLogger(std::make_unique<CaptureLogger>()) { + oldLogger = logger; + logger = tempLogger.get(); + } + + ~CaptureLogging() { + logger = oldLogger; + } + + std::string get() const { + return tempLogger->get(); + } + }; + + + // Testing eval of PrimOp's + class PrimOpTest : public LibExprTest {}; + + + TEST_F(PrimOpTest, throw) { + ASSERT_THROW(eval("throw \"foo\""), ThrownError); + } + + TEST_F(PrimOpTest, abort) { + ASSERT_THROW(eval("abort \"abort\""), Abort); + } + + TEST_F(PrimOpTest, ceil) { + auto v = eval("builtins.ceil 1.9"); + ASSERT_THAT(v, IsIntEq(2)); + } + + TEST_F(PrimOpTest, floor) { + auto v = eval("builtins.floor 1.9"); + ASSERT_THAT(v, IsIntEq(1)); + } + + TEST_F(PrimOpTest, tryEvalFailure) { + auto v = eval("builtins.tryEval (throw \"\")"); + ASSERT_THAT(v, IsAttrsOfSize(2)); + auto s = createSymbol("success"); + auto p = v.attrs->get(s); + ASSERT_NE(p, nullptr); + ASSERT_THAT(*p->value, IsFalse()); + } + + TEST_F(PrimOpTest, tryEvalSuccess) { + auto v = eval("builtins.tryEval 123"); + ASSERT_THAT(v, IsAttrs()); + auto s = createSymbol("success"); + auto p = v.attrs->get(s); + ASSERT_NE(p, nullptr); + ASSERT_THAT(*p->value, IsTrue()); + s = createSymbol("value"); + p = v.attrs->get(s); + ASSERT_NE(p, nullptr); + ASSERT_THAT(*p->value, IsIntEq(123)); + } + + TEST_F(PrimOpTest, getEnv) { + setenv("_NIX_UNIT_TEST_ENV_VALUE", "test value", 1); + auto v = eval("builtins.getEnv \"_NIX_UNIT_TEST_ENV_VALUE\""); + ASSERT_THAT(v, IsStringEq("test value")); + } + + TEST_F(PrimOpTest, seq) { + ASSERT_THROW(eval("let x = throw \"test\"; in builtins.seq x { }"), ThrownError); + } + + TEST_F(PrimOpTest, seqNotDeep) { + auto v = eval("let x = { z = throw \"test\"; }; in builtins.seq x { }"); + ASSERT_THAT(v, IsAttrs()); + } + + TEST_F(PrimOpTest, deepSeq) { + ASSERT_THROW(eval("let x = { z = throw \"test\"; }; in builtins.deepSeq x { }"), ThrownError); + } + + TEST_F(PrimOpTest, trace) { + CaptureLogging l; + auto v = eval("builtins.trace \"test string 123\" 123"); + ASSERT_THAT(v, IsIntEq(123)); + auto text = l.get(); + ASSERT_NE(text.find("test string 123"), std::string::npos); + } + + TEST_F(PrimOpTest, placeholder) { + auto v = eval("builtins.placeholder \"out\""); + ASSERT_THAT(v, IsStringEq("/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9")); + } + + TEST_F(PrimOpTest, baseNameOf) { + auto v = eval("builtins.baseNameOf /some/path"); + ASSERT_THAT(v, IsStringEq("path")); + } + + TEST_F(PrimOpTest, dirOf) { + auto v = 
eval("builtins.dirOf /some/path"); + ASSERT_THAT(v, IsPathEq("/some")); + } + + TEST_F(PrimOpTest, attrValues) { + auto v = eval("builtins.attrValues { x = \"foo\"; a = 1; }"); + ASSERT_THAT(v, IsListOfSize(2)); + ASSERT_THAT(*v.listElems()[0], IsIntEq(1)); + ASSERT_THAT(*v.listElems()[1], IsStringEq("foo")); + } + + TEST_F(PrimOpTest, getAttr) { + auto v = eval("builtins.getAttr \"x\" { x = \"foo\"; }"); + ASSERT_THAT(v, IsStringEq("foo")); + } + + TEST_F(PrimOpTest, getAttrNotFound) { + // FIXME: TypeError is really bad here, also the error wording is worse + // than on Nix <=2.3 + ASSERT_THROW(eval("builtins.getAttr \"y\" { }"), TypeError); + } + + TEST_F(PrimOpTest, unsafeGetAttrPos) { + // The `y` attribute is at position + const char* expr = "builtins.unsafeGetAttrPos \"y\" { y = \"x\"; }"; + auto v = eval(expr); + ASSERT_THAT(v, IsAttrsOfSize(3)); + + auto file = v.attrs->find(createSymbol("file")); + ASSERT_NE(file, nullptr); + // FIXME: The file when running these tests is the input string?!? + ASSERT_THAT(*file->value, IsStringEq(expr)); + + auto line = v.attrs->find(createSymbol("line")); + ASSERT_NE(line, nullptr); + ASSERT_THAT(*line->value, IsIntEq(1)); + + auto column = v.attrs->find(createSymbol("column")); + ASSERT_NE(column, nullptr); + ASSERT_THAT(*column->value, IsIntEq(33)); + } + + TEST_F(PrimOpTest, hasAttr) { + auto v = eval("builtins.hasAttr \"x\" { x = 1; }"); + ASSERT_THAT(v, IsTrue()); + } + + TEST_F(PrimOpTest, hasAttrNotFound) { + auto v = eval("builtins.hasAttr \"x\" { }"); + ASSERT_THAT(v, IsFalse()); + } + + TEST_F(PrimOpTest, isAttrs) { + auto v = eval("builtins.isAttrs {}"); + ASSERT_THAT(v, IsTrue()); + } + + TEST_F(PrimOpTest, isAttrsFalse) { + auto v = eval("builtins.isAttrs null"); + ASSERT_THAT(v, IsFalse()); + } + + TEST_F(PrimOpTest, removeAttrs) { + auto v = eval("builtins.removeAttrs { x = 1; } [\"x\"]"); + ASSERT_THAT(v, IsAttrsOfSize(0)); + } + + TEST_F(PrimOpTest, removeAttrsRetains) { + auto v = eval("builtins.removeAttrs { x = 1; y = 2; } [\"x\"]"); + ASSERT_THAT(v, IsAttrsOfSize(1)); + ASSERT_NE(v.attrs->find(createSymbol("y")), nullptr); + } + + TEST_F(PrimOpTest, listToAttrsEmptyList) { + auto v = eval("builtins.listToAttrs []"); + ASSERT_THAT(v, IsAttrsOfSize(0)); + ASSERT_EQ(v.type(), nAttrs); + ASSERT_EQ(v.attrs->size(), 0); + } + + TEST_F(PrimOpTest, listToAttrsNotFieldName) { + ASSERT_THROW(eval("builtins.listToAttrs [{}]"), Error); + } + + TEST_F(PrimOpTest, listToAttrs) { + auto v = eval("builtins.listToAttrs [ { name = \"key\"; value = 123; } ]"); + ASSERT_THAT(v, IsAttrsOfSize(1)); + auto key = v.attrs->find(createSymbol("key")); + ASSERT_NE(key, nullptr); + ASSERT_THAT(*key->value, IsIntEq(123)); + } + + TEST_F(PrimOpTest, intersectAttrs) { + auto v = eval("builtins.intersectAttrs { a = 1; b = 2; } { b = 3; c = 4; }"); + ASSERT_THAT(v, IsAttrsOfSize(1)); + auto b = v.attrs->find(createSymbol("b")); + ASSERT_NE(b, nullptr); + ASSERT_THAT(*b->value, IsIntEq(3)); + } + + TEST_F(PrimOpTest, catAttrs) { + auto v = eval("builtins.catAttrs \"a\" [{a = 1;} {b = 0;} {a = 2;}]"); + ASSERT_THAT(v, IsListOfSize(2)); + ASSERT_THAT(*v.listElems()[0], IsIntEq(1)); + ASSERT_THAT(*v.listElems()[1], IsIntEq(2)); + } + + TEST_F(PrimOpTest, functionArgs) { + auto v = eval("builtins.functionArgs ({ x, y ? 
123}: 1)"); + ASSERT_THAT(v, IsAttrsOfSize(2)); + + auto x = v.attrs->find(createSymbol("x")); + ASSERT_NE(x, nullptr); + ASSERT_THAT(*x->value, IsFalse()); + + auto y = v.attrs->find(createSymbol("y")); + ASSERT_NE(y, nullptr); + ASSERT_THAT(*y->value, IsTrue()); + } + + TEST_F(PrimOpTest, mapAttrs) { + auto v = eval("builtins.mapAttrs (name: value: value * 10) { a = 1; b = 2; }"); + ASSERT_THAT(v, IsAttrsOfSize(2)); + + auto a = v.attrs->find(createSymbol("a")); + ASSERT_NE(a, nullptr); + ASSERT_THAT(*a->value, IsThunk()); + state.forceValue(*a->value, noPos); + ASSERT_THAT(*a->value, IsIntEq(10)); + + auto b = v.attrs->find(createSymbol("b")); + ASSERT_NE(b, nullptr); + ASSERT_THAT(*b->value, IsThunk()); + state.forceValue(*b->value, noPos); + ASSERT_THAT(*b->value, IsIntEq(20)); + } + + TEST_F(PrimOpTest, isList) { + auto v = eval("builtins.isList []"); + ASSERT_THAT(v, IsTrue()); + } + + TEST_F(PrimOpTest, isListFalse) { + auto v = eval("builtins.isList null"); + ASSERT_THAT(v, IsFalse()); + } + + TEST_F(PrimOpTest, elemtAt) { + auto v = eval("builtins.elemAt [0 1 2 3] 3"); + ASSERT_THAT(v, IsIntEq(3)); + } + + TEST_F(PrimOpTest, elemtAtOutOfBounds) { + ASSERT_THROW(eval("builtins.elemAt [0 1 2 3] 5"), Error); + } + + TEST_F(PrimOpTest, head) { + auto v = eval("builtins.head [ 3 2 1 0 ]"); + ASSERT_THAT(v, IsIntEq(3)); + } + + TEST_F(PrimOpTest, headEmpty) { + ASSERT_THROW(eval("builtins.head [ ]"), Error); + } + + TEST_F(PrimOpTest, headWrongType) { + ASSERT_THROW(eval("builtins.head { }"), Error); + } + + TEST_F(PrimOpTest, tail) { + auto v = eval("builtins.tail [ 3 2 1 0 ]"); + ASSERT_THAT(v, IsListOfSize(3)); + for (const auto [n, elem] : enumerate(v.listItems())) + ASSERT_THAT(*elem, IsIntEq(2 - static_cast<int>(n))); + } + + TEST_F(PrimOpTest, tailEmpty) { + ASSERT_THROW(eval("builtins.tail []"), Error); + } + + TEST_F(PrimOpTest, map) { + auto v = eval("map (x: \"foo\" + x) [ \"bar\" \"bla\" \"abc\" ]"); + ASSERT_THAT(v, IsListOfSize(3)); + auto elem = v.listElems()[0]; + ASSERT_THAT(*elem, IsThunk()); + state.forceValue(*elem, noPos); + ASSERT_THAT(*elem, IsStringEq("foobar")); + + elem = v.listElems()[1]; + ASSERT_THAT(*elem, IsThunk()); + state.forceValue(*elem, noPos); + ASSERT_THAT(*elem, IsStringEq("foobla")); + + elem = v.listElems()[2]; + ASSERT_THAT(*elem, IsThunk()); + state.forceValue(*elem, noPos); + ASSERT_THAT(*elem, IsStringEq("fooabc")); + } + + TEST_F(PrimOpTest, filter) { + auto v = eval("builtins.filter (x: x == 2) [ 3 2 3 2 3 2 ]"); + ASSERT_THAT(v, IsListOfSize(3)); + for (const auto elem : v.listItems()) + ASSERT_THAT(*elem, IsIntEq(2)); + } + + TEST_F(PrimOpTest, elemTrue) { + auto v = eval("builtins.elem 3 [ 1 2 3 4 5 ]"); + ASSERT_THAT(v, IsTrue()); + } + + TEST_F(PrimOpTest, elemFalse) { + auto v = eval("builtins.elem 6 [ 1 2 3 4 5 ]"); + ASSERT_THAT(v, IsFalse()); + } + + TEST_F(PrimOpTest, concatLists) { + auto v = eval("builtins.concatLists [[1 2] [3 4]]"); + ASSERT_THAT(v, IsListOfSize(4)); + for (const auto [i, elem] : enumerate(v.listItems())) + ASSERT_THAT(*elem, IsIntEq(static_cast<int>(i)+1)); + } + + TEST_F(PrimOpTest, length) { + auto v = eval("builtins.length [ 1 2 3 ]"); + ASSERT_THAT(v, IsIntEq(3)); + } + + TEST_F(PrimOpTest, foldStrict) { + auto v = eval("builtins.foldl' (a: b: a + b) 0 [1 2 3]"); + ASSERT_THAT(v, IsIntEq(6)); + } + + TEST_F(PrimOpTest, anyTrue) { + auto v = eval("builtins.any (x: x == 2) [ 1 2 3 ]"); + ASSERT_THAT(v, IsTrue()); + } + + TEST_F(PrimOpTest, anyFalse) { + auto v = eval("builtins.any (x: x == 5) [ 1 2 3 ]"); 
+ ASSERT_THAT(v, IsFalse()); + } + + TEST_F(PrimOpTest, allTrue) { + auto v = eval("builtins.all (x: x > 0) [ 1 2 3 ]"); + ASSERT_THAT(v, IsTrue()); + } + + TEST_F(PrimOpTest, allFalse) { + auto v = eval("builtins.all (x: x <= 0) [ 1 2 3 ]"); + ASSERT_THAT(v, IsFalse()); + } + + TEST_F(PrimOpTest, genList) { + auto v = eval("builtins.genList (x: x + 1) 3"); + ASSERT_EQ(v.type(), nList); + ASSERT_EQ(v.listSize(), 3); + for (const auto [i, elem] : enumerate(v.listItems())) { + ASSERT_THAT(*elem, IsThunk()); + state.forceValue(*elem, noPos); + ASSERT_THAT(*elem, IsIntEq(static_cast<int>(i)+1)); + } + } + + TEST_F(PrimOpTest, sortLessThan) { + auto v = eval("builtins.sort builtins.lessThan [ 483 249 526 147 42 77 ]"); + ASSERT_EQ(v.type(), nList); + ASSERT_EQ(v.listSize(), 6); + + const std::vector<int> numbers = { 42, 77, 147, 249, 483, 526 }; + for (const auto [n, elem] : enumerate(v.listItems())) + ASSERT_THAT(*elem, IsIntEq(numbers[n])); + } + + TEST_F(PrimOpTest, partition) { + auto v = eval("builtins.partition (x: x > 10) [1 23 9 3 42]"); + ASSERT_THAT(v, IsAttrsOfSize(2)); + + auto right = v.attrs->get(createSymbol("right")); + ASSERT_NE(right, nullptr); + ASSERT_THAT(*right->value, IsListOfSize(2)); + ASSERT_THAT(*right->value->listElems()[0], IsIntEq(23)); + ASSERT_THAT(*right->value->listElems()[1], IsIntEq(42)); + + auto wrong = v.attrs->get(createSymbol("wrong")); + ASSERT_NE(wrong, nullptr); + ASSERT_EQ(wrong->value->type(), nList); + ASSERT_EQ(wrong->value->listSize(), 3); + ASSERT_THAT(*wrong->value, IsListOfSize(3)); + ASSERT_THAT(*wrong->value->listElems()[0], IsIntEq(1)); + ASSERT_THAT(*wrong->value->listElems()[1], IsIntEq(9)); + ASSERT_THAT(*wrong->value->listElems()[2], IsIntEq(3)); + } + + TEST_F(PrimOpTest, concatMap) { + auto v = eval("builtins.concatMap (x: x ++ [0]) [ [1 2] [3 4] ]"); + ASSERT_EQ(v.type(), nList); + ASSERT_EQ(v.listSize(), 6); + + const std::vector<int> numbers = { 1, 2, 0, 3, 4, 0 }; + for (const auto [n, elem] : enumerate(v.listItems())) + ASSERT_THAT(*elem, IsIntEq(numbers[n])); + } + + TEST_F(PrimOpTest, addInt) { + auto v = eval("builtins.add 3 5"); + ASSERT_THAT(v, IsIntEq(8)); + } + + TEST_F(PrimOpTest, addFloat) { + auto v = eval("builtins.add 3.0 5.0"); + ASSERT_THAT(v, IsFloatEq(8.0)); + } + + TEST_F(PrimOpTest, addFloatToInt) { + auto v = eval("builtins.add 3.0 5"); + ASSERT_THAT(v, IsFloatEq(8.0)); + + v = eval("builtins.add 3 5.0"); + ASSERT_THAT(v, IsFloatEq(8.0)); + } + + TEST_F(PrimOpTest, subInt) { + auto v = eval("builtins.sub 5 2"); + ASSERT_THAT(v, IsIntEq(3)); + } + + TEST_F(PrimOpTest, subFloat) { + auto v = eval("builtins.sub 5.0 2.0"); + ASSERT_THAT(v, IsFloatEq(3.0)); + } + + TEST_F(PrimOpTest, subFloatFromInt) { + auto v = eval("builtins.sub 5.0 2"); + ASSERT_THAT(v, IsFloatEq(3.0)); + + v = eval("builtins.sub 4 2.0"); + ASSERT_THAT(v, IsFloatEq(2.0)); + } + + TEST_F(PrimOpTest, mulInt) { + auto v = eval("builtins.mul 3 5"); + ASSERT_THAT(v, IsIntEq(15)); + } + + TEST_F(PrimOpTest, mulFloat) { + auto v = eval("builtins.mul 3.0 5.0"); + ASSERT_THAT(v, IsFloatEq(15.0)); + } + + TEST_F(PrimOpTest, mulFloatMixed) { + auto v = eval("builtins.mul 3 5.0"); + ASSERT_THAT(v, IsFloatEq(15.0)); + + v = eval("builtins.mul 2.0 5"); + ASSERT_THAT(v, IsFloatEq(10.0)); + } + + TEST_F(PrimOpTest, divInt) { + auto v = eval("builtins.div 5 (-1)"); + ASSERT_THAT(v, IsIntEq(-5)); + } + + TEST_F(PrimOpTest, divIntZero) { + ASSERT_THROW(eval("builtins.div 5 0"), EvalError); + } + + TEST_F(PrimOpTest, divFloat) { + auto v = eval("builtins.div 5.0 
(-1)"); + ASSERT_THAT(v, IsFloatEq(-5.0)); + } + + TEST_F(PrimOpTest, divFloatZero) { + ASSERT_THROW(eval("builtins.div 5.0 0.0"), EvalError); + } + + TEST_F(PrimOpTest, bitOr) { + auto v = eval("builtins.bitOr 1 2"); + ASSERT_THAT(v, IsIntEq(3)); + } + + TEST_F(PrimOpTest, bitXor) { + auto v = eval("builtins.bitXor 3 2"); + ASSERT_THAT(v, IsIntEq(1)); + } + + TEST_F(PrimOpTest, lessThanFalse) { + auto v = eval("builtins.lessThan 3 1"); + ASSERT_THAT(v, IsFalse()); + } + + TEST_F(PrimOpTest, lessThanTrue) { + auto v = eval("builtins.lessThan 1 3"); + ASSERT_THAT(v, IsTrue()); + } + + TEST_F(PrimOpTest, toStringAttrsThrows) { + ASSERT_THROW(eval("builtins.toString {}"), EvalError); + } + + TEST_F(PrimOpTest, toStringLambdaThrows) { + ASSERT_THROW(eval("builtins.toString (x: x)"), EvalError); + } + + class ToStringPrimOpTest : + public PrimOpTest, + public testing::WithParamInterface<std::tuple<std::string, std::string_view>> + {}; + + TEST_P(ToStringPrimOpTest, toString) { + const auto [input, output] = GetParam(); + auto v = eval(input); + ASSERT_THAT(v, IsStringEq(output)); + } + +#define CASE(input, output) (std::make_tuple(std::string_view("builtins.toString " input), std::string_view(output))) + INSTANTIATE_TEST_SUITE_P( + toString, + ToStringPrimOpTest, + testing::Values( + CASE(R"("foo")", "foo"), + CASE(R"(1)", "1"), + CASE(R"([1 2 3])", "1 2 3"), + CASE(R"(.123)", "0.123000"), + CASE(R"(true)", "1"), + CASE(R"(false)", ""), + CASE(R"(null)", ""), + CASE(R"({ v = "bar"; __toString = self: self.v; })", "bar"), + CASE(R"({ v = "bar"; __toString = self: self.v; outPath = "foo"; })", "bar"), + CASE(R"({ outPath = "foo"; })", "foo"), + CASE(R"(./test)", "/test") + ) + ); +#undef CASE + + TEST_F(PrimOpTest, substring){ + auto v = eval("builtins.substring 0 3 \"nixos\""); + ASSERT_THAT(v, IsStringEq("nix")); + } + + TEST_F(PrimOpTest, substringSmallerString){ + auto v = eval("builtins.substring 0 3 \"n\""); + ASSERT_THAT(v, IsStringEq("n")); + } + + TEST_F(PrimOpTest, substringEmptyString){ + auto v = eval("builtins.substring 1 3 \"\""); + ASSERT_THAT(v, IsStringEq("")); + } + + TEST_F(PrimOpTest, stringLength) { + auto v = eval("builtins.stringLength \"123\""); + ASSERT_THAT(v, IsIntEq(3)); + } + TEST_F(PrimOpTest, hashStringMd5) { + auto v = eval("builtins.hashString \"md5\" \"asdf\""); + ASSERT_THAT(v, IsStringEq("912ec803b2ce49e4a541068d495ab570")); + } + + TEST_F(PrimOpTest, hashStringSha1) { + auto v = eval("builtins.hashString \"sha1\" \"asdf\""); + ASSERT_THAT(v, IsStringEq("3da541559918a808c2402bba5012f6c60b27661c")); + } + + TEST_F(PrimOpTest, hashStringSha256) { + auto v = eval("builtins.hashString \"sha256\" \"asdf\""); + ASSERT_THAT(v, IsStringEq("f0e4c2f76c58916ec258f246851bea091d14d4247a2fc3e18694461b1816e13b")); + } + + TEST_F(PrimOpTest, hashStringSha512) { + auto v = eval("builtins.hashString \"sha512\" \"asdf\""); + ASSERT_THAT(v, IsStringEq("401b09eab3c013d4ca54922bb802bec8fd5318192b0a75f201d8b3727429080fb337591abd3e44453b954555b7a0812e1081c39b740293f765eae731f5a65ed1")); + } + + TEST_F(PrimOpTest, hashStringInvalidHashType) { + ASSERT_THROW(eval("builtins.hashString \"foobar\" \"asdf\""), Error); + } + + TEST_F(PrimOpTest, nixPath) { + auto v = eval("builtins.nixPath"); + ASSERT_EQ(v.type(), nList); + // We can't test much more as currently the EvalSettings are a global + // that we can't easily swap / replace + } + + TEST_F(PrimOpTest, langVersion) { + auto v = eval("builtins.langVersion"); + ASSERT_EQ(v.type(), nInt); + } + + TEST_F(PrimOpTest, storeDir) { + auto v = 
eval("builtins.storeDir"); + ASSERT_THAT(v, IsStringEq("/nix/store")); + } + + TEST_F(PrimOpTest, nixVersion) { + auto v = eval("builtins.nixVersion"); + ASSERT_THAT(v, IsStringEq(nixVersion)); + } + + TEST_F(PrimOpTest, currentSystem) { + auto v = eval("builtins.currentSystem"); + ASSERT_THAT(v, IsStringEq(settings.thisSystem.get())); + } + + TEST_F(PrimOpTest, derivation) { + auto v = eval("derivation"); + ASSERT_EQ(v.type(), nFunction); + ASSERT_TRUE(v.isLambda()); + ASSERT_NE(v.lambda.fun, nullptr); + ASSERT_TRUE(v.lambda.fun->hasFormals()); + } + + TEST_F(PrimOpTest, currentTime) { + auto v = eval("builtins.currentTime"); + ASSERT_EQ(v.type(), nInt); + ASSERT_TRUE(v.integer > 0); + } + + TEST_F(PrimOpTest, splitVersion) { + auto v = eval("builtins.splitVersion \"1.2.3git\""); + ASSERT_THAT(v, IsListOfSize(4)); + + const std::vector<std::string_view> strings = { "1", "2", "3", "git" }; + for (const auto [n, p] : enumerate(v.listItems())) + ASSERT_THAT(*p, IsStringEq(strings[n])); + } + + class CompareVersionsPrimOpTest : + public PrimOpTest, + public testing::WithParamInterface<std::tuple<std::string, const int>> + {}; + + TEST_P(CompareVersionsPrimOpTest, compareVersions) { + auto [expression, expectation] = GetParam(); + auto v = eval(expression); + ASSERT_THAT(v, IsIntEq(expectation)); + } + +#define CASE(a, b, expected) (std::make_tuple("builtins.compareVersions \"" #a "\" \"" #b "\"", expected)) + INSTANTIATE_TEST_SUITE_P( + compareVersions, + CompareVersionsPrimOpTest, + testing::Values( + // The first two are weird cases. Intuition tells they should + // be the same but they aren't. + CASE(1.0, 1.0.0, -1), + CASE(1.0.0, 1.0, 1), + // the following are from the nix-env manual: + CASE(1.0, 2.3, -1), + CASE(2.1, 2.3, -1), + CASE(2.3, 2.3, 0), + CASE(2.5, 2.3, 1), + CASE(3.1, 2.3, 1), + CASE(2.3.1, 2.3, 1), + CASE(2.3.1, 2.3a, 1), + CASE(2.3pre1, 2.3, -1), + CASE(2.3pre3, 2.3pre12, -1), + CASE(2.3a, 2.3c, -1), + CASE(2.3pre1, 2.3c, -1), + CASE(2.3pre1, 2.3q, -1) + ) + ); +#undef CASE + + + class ParseDrvNamePrimOpTest : + public PrimOpTest, + public testing::WithParamInterface<std::tuple<std::string, std::string_view, std::string_view>> + {}; + + TEST_P(ParseDrvNamePrimOpTest, parseDrvName) { + auto [input, expectedName, expectedVersion] = GetParam(); + const auto expr = fmt("builtins.parseDrvName \"%1%\"", input); + auto v = eval(expr); + ASSERT_THAT(v, IsAttrsOfSize(2)); + + auto name = v.attrs->find(createSymbol("name")); + ASSERT_TRUE(name); + ASSERT_THAT(*name->value, IsStringEq(expectedName)); + + auto version = v.attrs->find(createSymbol("version")); + ASSERT_TRUE(version); + ASSERT_THAT(*version->value, IsStringEq(expectedVersion)); + } + + INSTANTIATE_TEST_SUITE_P( + parseDrvName, + ParseDrvNamePrimOpTest, + testing::Values( + std::make_tuple("nix-0.12pre12876", "nix", "0.12pre12876"), + std::make_tuple("a-b-c-1234pre5+git", "a-b-c", "1234pre5+git") + ) + ); + + TEST_F(PrimOpTest, replaceStrings) { + // FIXME: add a test that verifies the string context is as expected + auto v = eval("builtins.replaceStrings [\"oo\" \"a\"] [\"a\" \"i\"] \"foobar\""); + ASSERT_EQ(v.type(), nString); + ASSERT_EQ(v.string.s, std::string_view("fabir")); + } + + TEST_F(PrimOpTest, concatStringsSep) { + // FIXME: add a test that verifies the string context is as expected + auto v = eval("builtins.concatStringsSep \"%\" [\"foo\" \"bar\" \"baz\"]"); + ASSERT_EQ(v.type(), nString); + ASSERT_EQ(std::string_view(v.string.s), "foo%bar%baz"); + } + + TEST_F(PrimOpTest, split1) { + // v = [ "" [ "a" ] 
"c" ] + auto v = eval("builtins.split \"(a)b\" \"abc\""); + ASSERT_THAT(v, IsListOfSize(3)); + + ASSERT_THAT(*v.listElems()[0], IsStringEq("")); + + ASSERT_THAT(*v.listElems()[1], IsListOfSize(1)); + ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a")); + + ASSERT_THAT(*v.listElems()[2], IsStringEq("c")); + } + + TEST_F(PrimOpTest, split2) { + // v is expected to be a list [ "" [ "a" ] "b" [ "c"] "" ] + auto v = eval("builtins.split \"([ac])\" \"abc\""); + ASSERT_THAT(v, IsListOfSize(5)); + + ASSERT_THAT(*v.listElems()[0], IsStringEq("")); + + ASSERT_THAT(*v.listElems()[1], IsListOfSize(1)); + ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a")); + + ASSERT_THAT(*v.listElems()[2], IsStringEq("b")); + + ASSERT_THAT(*v.listElems()[3], IsListOfSize(1)); + ASSERT_THAT(*v.listElems()[3]->listElems()[0], IsStringEq("c")); + + ASSERT_THAT(*v.listElems()[4], IsStringEq("")); + } + + TEST_F(PrimOpTest, split3) { + auto v = eval("builtins.split \"(a)|(c)\" \"abc\""); + ASSERT_THAT(v, IsListOfSize(5)); + + // First list element + ASSERT_THAT(*v.listElems()[0], IsStringEq("")); + + // 2nd list element is a list [ "" null ] + ASSERT_THAT(*v.listElems()[1], IsListOfSize(2)); + ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a")); + ASSERT_THAT(*v.listElems()[1]->listElems()[1], IsNull()); + + // 3rd element + ASSERT_THAT(*v.listElems()[2], IsStringEq("b")); + + // 4th element is a list: [ null "c" ] + ASSERT_THAT(*v.listElems()[3], IsListOfSize(2)); + ASSERT_THAT(*v.listElems()[3]->listElems()[0], IsNull()); + ASSERT_THAT(*v.listElems()[3]->listElems()[1], IsStringEq("c")); + + // 5th element is the empty string + ASSERT_THAT(*v.listElems()[4], IsStringEq("")); + } + + TEST_F(PrimOpTest, split4) { + auto v = eval("builtins.split \"([[:upper:]]+)\" \" FOO \""); + ASSERT_THAT(v, IsListOfSize(3)); + auto first = v.listElems()[0]; + auto second = v.listElems()[1]; + auto third = v.listElems()[2]; + + ASSERT_THAT(*first, IsStringEq(" ")); + + ASSERT_THAT(*second, IsListOfSize(1)); + ASSERT_THAT(*second->listElems()[0], IsStringEq("FOO")); + + ASSERT_THAT(*third, IsStringEq(" ")); + } + + TEST_F(PrimOpTest, match1) { + auto v = eval("builtins.match \"ab\" \"abc\""); + ASSERT_THAT(v, IsNull()); + } + + TEST_F(PrimOpTest, match2) { + auto v = eval("builtins.match \"abc\" \"abc\""); + ASSERT_THAT(v, IsListOfSize(0)); + } + + TEST_F(PrimOpTest, match3) { + auto v = eval("builtins.match \"a(b)(c)\" \"abc\""); + ASSERT_THAT(v, IsListOfSize(2)); + ASSERT_THAT(*v.listElems()[0], IsStringEq("b")); + ASSERT_THAT(*v.listElems()[1], IsStringEq("c")); + } + + TEST_F(PrimOpTest, match4) { + auto v = eval("builtins.match \"[[:space:]]+([[:upper:]]+)[[:space:]]+\" \" FOO \""); + ASSERT_THAT(v, IsListOfSize(1)); + ASSERT_THAT(*v.listElems()[0], IsStringEq("FOO")); + } + + TEST_F(PrimOpTest, attrNames) { + auto v = eval("builtins.attrNames { x = 1; y = 2; z = 3; a = 2; }"); + ASSERT_THAT(v, IsListOfSize(4)); + + // ensure that the list is sorted + const std::vector<std::string_view> expected { "a", "x", "y", "z" }; + for (const auto [n, elem] : enumerate(v.listItems())) + ASSERT_THAT(*elem, IsStringEq(expected[n])); + } +} /* namespace nix */ diff --git a/src/libexpr/tests/trivial.cc b/src/libexpr/tests/trivial.cc new file mode 100644 index 000000000..8ce276e52 --- /dev/null +++ b/src/libexpr/tests/trivial.cc @@ -0,0 +1,196 @@ +#include "libexprtests.hh" + +namespace nix { + // Testing of trivial expressions + class TrivialExpressionTest : public LibExprTest {}; + + 
TEST_F(TrivialExpressionTest, true) { + auto v = eval("true"); + ASSERT_THAT(v, IsTrue()); + } + + TEST_F(TrivialExpressionTest, false) { + auto v = eval("false"); + ASSERT_THAT(v, IsFalse()); + } + + TEST_F(TrivialExpressionTest, null) { + auto v = eval("null"); + ASSERT_THAT(v, IsNull()); + } + + TEST_F(TrivialExpressionTest, 1) { + auto v = eval("1"); + ASSERT_THAT(v, IsIntEq(1)); + } + + TEST_F(TrivialExpressionTest, 1plus1) { + auto v = eval("1+1"); + ASSERT_THAT(v, IsIntEq(2)); + } + + TEST_F(TrivialExpressionTest, minus1) { + auto v = eval("-1"); + ASSERT_THAT(v, IsIntEq(-1)); + } + + TEST_F(TrivialExpressionTest, 1minus1) { + auto v = eval("1-1"); + ASSERT_THAT(v, IsIntEq(0)); + } + + TEST_F(TrivialExpressionTest, lambdaAdd) { + auto v = eval("let add = a: b: a + b; in add 1 2"); + ASSERT_THAT(v, IsIntEq(3)); + } + + TEST_F(TrivialExpressionTest, list) { + auto v = eval("[]"); + ASSERT_THAT(v, IsListOfSize(0)); + } + + TEST_F(TrivialExpressionTest, attrs) { + auto v = eval("{}"); + ASSERT_THAT(v, IsAttrsOfSize(0)); + } + + TEST_F(TrivialExpressionTest, float) { + auto v = eval("1.234"); + ASSERT_THAT(v, IsFloatEq(1.234)); + } + + TEST_F(TrivialExpressionTest, updateAttrs) { + auto v = eval("{ a = 1; } // { b = 2; a = 3; }"); + ASSERT_THAT(v, IsAttrsOfSize(2)); + auto a = v.attrs->find(createSymbol("a")); + ASSERT_NE(a, nullptr); + ASSERT_THAT(*a->value, IsIntEq(3)); + + auto b = v.attrs->find(createSymbol("b")); + ASSERT_NE(b, nullptr); + ASSERT_THAT(*b->value, IsIntEq(2)); + } + + TEST_F(TrivialExpressionTest, hasAttrOpFalse) { + auto v = eval("{} ? a"); + ASSERT_THAT(v, IsFalse()); + } + + TEST_F(TrivialExpressionTest, hasAttrOpTrue) { + auto v = eval("{ a = 123; } ? a"); + ASSERT_THAT(v, IsTrue()); + } + + TEST_F(TrivialExpressionTest, withFound) { + auto v = eval("with { a = 23; }; a"); + ASSERT_THAT(v, IsIntEq(23)); + } + + TEST_F(TrivialExpressionTest, withNotFound) { + ASSERT_THROW(eval("with {}; a"), Error); + } + + TEST_F(TrivialExpressionTest, withOverride) { + auto v = eval("with { a = 23; }; with { a = 42; }; a"); + ASSERT_THAT(v, IsIntEq(42)); + } + + TEST_F(TrivialExpressionTest, letOverWith) { + auto v = eval("let a = 23; in with { a = 1; }; a"); + ASSERT_THAT(v, IsIntEq(23)); + } + + TEST_F(TrivialExpressionTest, multipleLet) { + auto v = eval("let a = 23; in let a = 42; in a"); + ASSERT_THAT(v, IsIntEq(42)); + } + + TEST_F(TrivialExpressionTest, defaultFunctionArgs) { + auto v = eval("({ a ? 123 }: a) {}"); + ASSERT_THAT(v, IsIntEq(123)); + } + + TEST_F(TrivialExpressionTest, defaultFunctionArgsOverride) { + auto v = eval("({ a ? 123 }: a) { a = 5; }"); + ASSERT_THAT(v, IsIntEq(5)); + } + + TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureBack) { + auto v = eval("({ a ? 123 }@args: args) {}"); + ASSERT_THAT(v, IsAttrsOfSize(0)); + } + + TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureFront) { + auto v = eval("(args@{ a ? 
123 }: args) {}"); + ASSERT_THAT(v, IsAttrsOfSize(0)); + } + + TEST_F(TrivialExpressionTest, assertThrows) { + ASSERT_THROW(eval("let x = arg: assert arg == 1; 123; in x 2"), Error); + } + + TEST_F(TrivialExpressionTest, assertPassed) { + auto v = eval("let x = arg: assert arg == 1; 123; in x 1"); + ASSERT_THAT(v, IsIntEq(123)); + } + + class AttrSetMergeTrvialExpressionTest : + public TrivialExpressionTest, + public testing::WithParamInterface<const char*> + {}; + + TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy) { + // Usually Nix rejects duplicate keys in an attrset but it does allow + // so if it is an attribute set that contains disjoint sets of keys. + // The below is equivalent to `{a.b = 1; a.c = 2; }`. + // The attribute set `a` will be a Thunk at first as the attribuets + // have to be merged (or otherwise computed) and that is done in a lazy + // manner. + + auto expr = GetParam(); + auto v = eval(expr); + ASSERT_THAT(v, IsAttrsOfSize(1)); + + auto a = v.attrs->find(createSymbol("a")); + ASSERT_NE(a, nullptr); + + ASSERT_THAT(*a->value, IsThunk()); + state.forceValue(*a->value, noPos); + + ASSERT_THAT(*a->value, IsAttrsOfSize(2)); + + auto b = a->value->attrs->find(createSymbol("b")); + ASSERT_NE(b, nullptr); + ASSERT_THAT(*b->value, IsIntEq(1)); + + auto c = a->value->attrs->find(createSymbol("c")); + ASSERT_NE(c, nullptr); + ASSERT_THAT(*c->value, IsIntEq(2)); + } + + INSTANTIATE_TEST_SUITE_P( + attrsetMergeLazy, + AttrSetMergeTrvialExpressionTest, + testing::Values( + "{ a.b = 1; a.c = 2; }", + "{ a = { b = 1; }; a = { c = 2; }; }" + ) + ); + + TEST_F(TrivialExpressionTest, functor) { + auto v = eval("{ __functor = self: arg: self.v + arg; v = 10; } 5"); + ASSERT_THAT(v, IsIntEq(15)); + } + + TEST_F(TrivialExpressionTest, bindOr) { + auto v = eval("{ or = 1; }"); + ASSERT_THAT(v, IsAttrsOfSize(1)); + auto b = v.attrs->find(createSymbol("or")); + ASSERT_NE(b, nullptr); + ASSERT_THAT(*b->value, IsIntEq(1)); + } + + TEST_F(TrivialExpressionTest, orCantBeUsed) { + ASSERT_THROW(eval("let or = 1; in or"), Error); + } +} /* namespace nix */ diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc index 517da4c01..4d63d8b49 100644 --- a/src/libexpr/value-to-json.cc +++ b/src/libexpr/value-to-json.cc @@ -10,7 +10,7 @@ namespace nix { void printValueAsJSON(EvalState & state, bool strict, - Value & v, const Pos & pos, JSONPlaceholder & out, PathSet & context) + Value & v, const PosIdx pos, JSONPlaceholder & out, PathSet & context, bool copyToStore) { checkInterrupt(); @@ -32,7 +32,10 @@ void printValueAsJSON(EvalState & state, bool strict, break; case nPath: - out.write(state.copyPathToStore(context, v.path)); + if (copyToStore) + out.write(state.copyPathToStore(context, v.path)); + else + out.write(v.path); break; case nNull: @@ -50,14 +53,14 @@ void printValueAsJSON(EvalState & state, bool strict, auto obj(out.object()); StringSet names; for (auto & j : *v.attrs) - names.insert(j.name); + names.emplace(state.symbols[j.name]); for (auto & j : names) { Attr & a(*v.attrs->find(state.symbols.create(j))); auto placeholder(obj.placeholder(j)); - printValueAsJSON(state, strict, *a.value, *a.pos, placeholder, context); + printValueAsJSON(state, strict, *a.value, a.pos, placeholder, context, copyToStore); } } else - printValueAsJSON(state, strict, *i->value, *i->pos, out, context); + printValueAsJSON(state, strict, *i->value, i->pos, out, context, copyToStore); break; } @@ -65,13 +68,13 @@ void printValueAsJSON(EvalState & state, bool strict, auto list(out.list()); 
for (auto elem : v.listItems()) { auto placeholder(list.placeholder()); - printValueAsJSON(state, strict, *elem, pos, placeholder, context); + printValueAsJSON(state, strict, *elem, pos, placeholder, context, copyToStore); } break; } case nExternal: - v.external->printValueAsJSON(state, strict, out, context); + v.external->printValueAsJSON(state, strict, out, context, copyToStore); break; case nFloat: @@ -82,23 +85,25 @@ void printValueAsJSON(EvalState & state, bool strict, case nFunction: auto e = TypeError({ .msg = hintfmt("cannot convert %1% to JSON", showType(v)), - .errPos = v.determinePos(pos) + .errPos = state.positions[v.determinePos(pos)] }); - throw e.addTrace(pos, hintfmt("message for the trace")); + e.addTrace(state.positions[pos], hintfmt("message for the trace")); + state.debugThrowLastTrace(e); + throw e; } } void printValueAsJSON(EvalState & state, bool strict, - Value & v, const Pos & pos, std::ostream & str, PathSet & context) + Value & v, const PosIdx pos, std::ostream & str, PathSet & context, bool copyToStore) { JSONPlaceholder out(str); - printValueAsJSON(state, strict, v, pos, out, context); + printValueAsJSON(state, strict, v, pos, out, context, copyToStore); } void ExternalValueBase::printValueAsJSON(EvalState & state, bool strict, - JSONPlaceholder & out, PathSet & context) const + JSONPlaceholder & out, PathSet & context, bool copyToStore) const { - throw TypeError("cannot convert %1% to JSON", showType()); + state.debugThrowLastTrace(TypeError("cannot convert %1% to JSON", showType())); } diff --git a/src/libexpr/value-to-json.hh b/src/libexpr/value-to-json.hh index c2f797b29..7ddc8a5b1 100644 --- a/src/libexpr/value-to-json.hh +++ b/src/libexpr/value-to-json.hh @@ -11,9 +11,9 @@ namespace nix { class JSONPlaceholder; void printValueAsJSON(EvalState & state, bool strict, - Value & v, const Pos & pos, JSONPlaceholder & out, PathSet & context); + Value & v, const PosIdx pos, JSONPlaceholder & out, PathSet & context, bool copyToStore = true); void printValueAsJSON(EvalState & state, bool strict, - Value & v, const Pos & pos, std::ostream & str, PathSet & context); + Value & v, const PosIdx pos, std::ostream & str, PathSet & context, bool copyToStore = true); } diff --git a/src/libexpr/value-to-xml.cc b/src/libexpr/value-to-xml.cc index afeaf5694..7c3bf9492 100644 --- a/src/libexpr/value-to-xml.cc +++ b/src/libexpr/value-to-xml.cc @@ -19,10 +19,10 @@ static XMLAttrs singletonAttrs(const std::string & name, const std::string & val static void printValueAsXML(EvalState & state, bool strict, bool location, Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen, - const Pos & pos); + const PosIdx pos); -static void posToXML(XMLAttrs & xmlAttrs, const Pos & pos) +static void posToXML(EvalState & state, XMLAttrs & xmlAttrs, const Pos & pos) { xmlAttrs["path"] = pos.file; xmlAttrs["line"] = (format("%1%") % pos.line).str(); @@ -36,25 +36,25 @@ static void showAttrs(EvalState & state, bool strict, bool location, StringSet names; for (auto & i : attrs) - names.insert(i.name); + names.emplace(state.symbols[i.name]); for (auto & i : names) { Attr & a(*attrs.find(state.symbols.create(i))); XMLAttrs xmlAttrs; xmlAttrs["name"] = i; - if (location && a.pos != ptr(&noPos)) posToXML(xmlAttrs, *a.pos); + if (location && a.pos) posToXML(state, xmlAttrs, state.positions[a.pos]); XMLOpenElement _(doc, "attr", xmlAttrs); printValueAsXML(state, strict, location, - *a.value, doc, context, drvsSeen, *a.pos); + *a.value, doc, context, drvsSeen, a.pos); } } static void 
printValueAsXML(EvalState & state, bool strict, bool location, Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen, - const Pos & pos) + const PosIdx pos) { checkInterrupt(); @@ -93,14 +93,14 @@ static void printValueAsXML(EvalState & state, bool strict, bool location, Path drvPath; a = v.attrs->find(state.sDrvPath); if (a != v.attrs->end()) { - if (strict) state.forceValue(*a->value, *a->pos); + if (strict) state.forceValue(*a->value, a->pos); if (a->value->type() == nString) xmlAttrs["drvPath"] = drvPath = a->value->string.s; } a = v.attrs->find(state.sOutPath); if (a != v.attrs->end()) { - if (strict) state.forceValue(*a->value, *a->pos); + if (strict) state.forceValue(*a->value, a->pos); if (a->value->type() == nString) xmlAttrs["outPath"] = a->value->string.s; } @@ -134,18 +134,18 @@ static void printValueAsXML(EvalState & state, bool strict, bool location, break; } XMLAttrs xmlAttrs; - if (location) posToXML(xmlAttrs, v.lambda.fun->pos); + if (location) posToXML(state, xmlAttrs, state.positions[v.lambda.fun->pos]); XMLOpenElement _(doc, "function", xmlAttrs); if (v.lambda.fun->hasFormals()) { XMLAttrs attrs; - if (!v.lambda.fun->arg.empty()) attrs["name"] = v.lambda.fun->arg; + if (v.lambda.fun->arg) attrs["name"] = state.symbols[v.lambda.fun->arg]; if (v.lambda.fun->formals->ellipsis) attrs["ellipsis"] = "1"; XMLOpenElement _(doc, "attrspat", attrs); - for (auto & i : v.lambda.fun->formals->lexicographicOrder()) - doc.writeEmptyElement("attr", singletonAttrs("name", i.name)); + for (auto & i : v.lambda.fun->formals->lexicographicOrder(state.symbols)) + doc.writeEmptyElement("attr", singletonAttrs("name", state.symbols[i.name])); } else - doc.writeEmptyElement("varpat", singletonAttrs("name", v.lambda.fun->arg)); + doc.writeEmptyElement("varpat", singletonAttrs("name", state.symbols[v.lambda.fun->arg])); break; } @@ -166,14 +166,14 @@ static void printValueAsXML(EvalState & state, bool strict, bool location, void ExternalValueBase::printValueAsXML(EvalState & state, bool strict, bool location, XMLWriter & doc, PathSet & context, PathSet & drvsSeen, - const Pos & pos) const + const PosIdx pos) const { doc.writeEmptyElement("unevaluated"); } void printValueAsXML(EvalState & state, bool strict, bool location, - Value & v, std::ostream & out, PathSet & context, const Pos & pos) + Value & v, std::ostream & out, PathSet & context, const PosIdx pos) { XMLWriter doc(true, out); XMLOpenElement root(doc, "expr"); diff --git a/src/libexpr/value-to-xml.hh b/src/libexpr/value-to-xml.hh index cc778a2cb..506f32b6b 100644 --- a/src/libexpr/value-to-xml.hh +++ b/src/libexpr/value-to-xml.hh @@ -9,6 +9,6 @@ namespace nix { void printValueAsXML(EvalState & state, bool strict, bool location, - Value & v, std::ostream & out, PathSet & context, const Pos & pos); + Value & v, std::ostream & out, PathSet & context, const PosIdx pos); } diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 3fdff71a5..590ba7783 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -56,7 +56,10 @@ struct Expr; struct ExprLambda; struct PrimOp; class Symbol; +class PosIdx; struct Pos; +class StorePath; +class Store; class EvalState; class XMLWriter; class JSONPlaceholder; @@ -64,6 +67,8 @@ class JSONPlaceholder; typedef int64_t NixInt; typedef double NixFloat; +typedef std::pair<StorePath, std::string> NixStringContextElem; +typedef std::vector<NixStringContextElem> NixStringContext; /* External values must descend from ExternalValueBase, so that * type-agnostic nix functions (e.g. 
showType) can be implemented @@ -94,12 +99,12 @@ class ExternalValueBase /* Print the value as JSON. Defaults to unconvertable, i.e. throws an error */ virtual void printValueAsJSON(EvalState & state, bool strict, - JSONPlaceholder & out, PathSet & context) const; + JSONPlaceholder & out, PathSet & context, bool copyToStore = true) const; /* Print the value as XML. Defaults to unevaluated */ virtual void printValueAsXML(EvalState & state, bool strict, bool location, XMLWriter & doc, PathSet & context, PathSet & drvsSeen, - const Pos & pos) const; + const PosIdx pos) const; virtual ~ExternalValueBase() { @@ -114,11 +119,14 @@ struct Value private: InternalType internalType; -friend std::string showType(const Value & v); -friend void printValue(std::ostream & str, std::set<const Value *> & active, const Value & v); + friend std::string showType(const Value & v); + + void print(const SymbolTable & symbols, std::ostream & str, std::set<const void *> * seen) const; public: + void print(const SymbolTable & symbols, std::ostream & str, bool showRepeated = false) const; + // Functions needed to distinguish the type // These should be removed eventually, by putting the functionality that's // needed by callers into methods of this type @@ -243,11 +251,6 @@ public: void mkStringMove(const char * s, const PathSet & context); - inline void mkString(const Symbol & s) - { - mkString(((const std::string &) s).c_str()); - } - inline void mkPath(const char * s) { clearValue(); @@ -361,14 +364,14 @@ public: return internalType == tList1 ? 1 : internalType == tList2 ? 2 : bigList.size; } - Pos determinePos(const Pos & pos) const; + PosIdx determinePos(const PosIdx pos) const; /* Check whether forcing this value requires a trivial amount of computation. In particular, function applications are non-trivial. 
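(For example, evaluating a constant literal is trivial, while evaluating a
       function application such as `import ./foo.nix` is not.)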
*/
    bool isTrivial() const;
-    std::vector<std::pair<Path, std::string>> getContext();
+    NixStringContext getContext(const Store &);
    auto listItems()
    {
@@ -401,9 +404,9 @@ public:
 #if HAVE_BOEHMGC
-typedef std::vector<Value *, traceable_allocator<Value *> > ValueVector;
-typedef std::map<Symbol, Value *, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, Value *> > > ValueMap;
-typedef std::map<Symbol, ValueVector, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, ValueVector> > > ValueVectorMap;
+typedef std::vector<Value *, traceable_allocator<Value *>> ValueVector;
+typedef std::map<Symbol, Value *, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, Value *>>> ValueMap;
+typedef std::map<Symbol, ValueVector, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, ValueVector>>> ValueVectorMap;
 #else
 typedef std::vector<Value *> ValueVector;
 typedef std::map<Symbol, Value *> ValueMap;
diff --git a/src/libfetchers/fetch-settings.cc b/src/libfetchers/fetch-settings.cc
new file mode 100644
index 000000000..e7d5244dc
--- /dev/null
+++ b/src/libfetchers/fetch-settings.cc
@@ -0,0 +1,13 @@
+#include "fetch-settings.hh"
+
+namespace nix {
+
+FetchSettings::FetchSettings()
+{
+}
+
+FetchSettings fetchSettings;
+
+static GlobalConfig::Register rFetchSettings(&fetchSettings);
+
+}
diff --git a/src/libfetchers/fetch-settings.hh b/src/libfetchers/fetch-settings.hh
new file mode 100644
index 000000000..6452143a1
--- /dev/null
+++ b/src/libfetchers/fetch-settings.hh
@@ -0,0 +1,93 @@
+#pragma once
+
+#include "types.hh"
+#include "config.hh"
+#include "util.hh"
+
+#include <map>
+#include <limits>
+
+#include <sys/types.h>
+
+namespace nix {
+
+struct FetchSettings : public Config
+{
+    FetchSettings();
+
+    Setting<StringMap> accessTokens{this, {}, "access-tokens",
+        R"(
+        Access tokens used to access protected GitHub, GitLab, or
+        other locations requiring token-based authentication.
+
+        Access tokens are specified as a string made up of
+        space-separated `host=token` values. The specific token
+        used is selected by matching the `host` portion against the
+        "host" specification of the input. The actual use of the
+        `token` value is determined by the type of resource being
+        accessed:
+
+        * Github: the token value is the OAUTH-TOKEN string obtained
+          as the Personal Access Token from the Github server (see
+          https://docs.github.com/en/developers/apps/building-oauth-apps/authorizing-oauth-apps).
+
+        * Gitlab: the token value is either the OAuth2 token or the
+          Personal Access Token (these are different types of tokens
+          for gitlab, see
+          https://docs.gitlab.com/12.10/ee/api/README.html#authentication).
+          The `token` value should be `type:tokenstring` where
+          `type` is either `OAuth2` or `PAT` to indicate which type
+          of token is being specified.
+
+        Example `~/.config/nix/nix.conf`:
+
+        ```
+        access-tokens = github.com=23ac...b289 gitlab.mycompany.com=PAT:A123Bp_Cd..EfG gitlab.com=OAuth2:1jklw3jk
+        ```
+
+        Example `~/code/flake.nix`:
+
+        ```nix
+        input.foo = {
+          type = "gitlab";
+          host = "gitlab.mycompany.com";
+          owner = "mycompany";
+          repo = "pro";
+        };
+        ```
+
+        This example specifies three tokens, one each for accessing
+        github.com, gitlab.mycompany.com, and gitlab.com.
+
+        The `input.foo` uses the "gitlab" fetcher, which might
+        require specifying the token type along with the token
+        value.
+ )"}; + + Setting<bool> allowDirty{this, true, "allow-dirty", + "Whether to allow dirty Git/Mercurial trees."}; + + Setting<bool> warnDirty{this, true, "warn-dirty", + "Whether to warn about dirty Git/Mercurial trees."}; + + Setting<std::string> flakeRegistry{this, "https://channels.nixos.org/flake-registry.json", "flake-registry", + "Path or URI of the global flake registry."}; + + Setting<bool> useRegistries{this, true, "use-registries", + "Whether to use flake registries to resolve flake references."}; + + Setting<bool> acceptFlakeConfig{this, false, "accept-flake-config", + "Whether to accept nix configuration from a flake without prompting."}; + + Setting<std::string> commitLockFileSummary{ + this, "", "commit-lockfile-summary", + R"( + The commit summary to use when committing changed flake lock files. If + empty, the summary is generated based on the action performed. + )"}; +}; + +// FIXME: don't use a global variable. +extern FetchSettings fetchSettings; + +} diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index 976f40d3b..6957d2da4 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -238,9 +238,18 @@ std::optional<std::string> Input::getRef() const std::optional<Hash> Input::getRev() const { - if (auto s = maybeGetStrAttr(attrs, "rev")) - return Hash::parseAny(*s, htSHA1); - return {}; + std::optional<Hash> hash = {}; + + if (auto s = maybeGetStrAttr(attrs, "rev")) { + try { + hash = Hash::parseAnyPrefixed(*s); + } catch (BadHash &e) { + // Default to sha1 for backwards compatibility with existing flakes + hash = Hash::parseAny(*s, htSHA1); + } + } + + return hash; } std::optional<uint64_t> Input::getRevCount() const diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 7f65c1533..7b7a1be35 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -5,13 +5,20 @@ #include "store-api.hh" #include "url-parts.hh" #include "pathlocks.hh" +#include "util.hh" +#include "git.hh" +#include "fetch-settings.hh" + +#include <regex> +#include <string.h> #include <sys/time.h> #include <sys/wait.h> using namespace std::string_literals; namespace nix::fetchers { +namespace { // Explicit initial branch of our bare repo to suppress warnings from new version of git. // The value itself does not matter, since we always fetch a specific revision or branch. @@ -19,17 +26,227 @@ namespace nix::fetchers { // old version of git, which will ignore unrecognized `-c` options. const std::string gitInitialBranch = "__nix_dummy_branch"; -static std::string readHead(const Path & path) +bool isCacheFileWithinTtl(const time_t now, const struct stat & st) +{ + return st.st_mtime + settings.tarballTtl > now; +} + +bool touchCacheFile(const Path& path, const time_t& touch_time) +{ + struct timeval times[2]; + times[0].tv_sec = touch_time; + times[0].tv_usec = 0; + times[1].tv_sec = touch_time; + times[1].tv_usec = 0; + + return lutimes(path.c_str(), times) == 0; +} + +Path getCachePath(std::string key) +{ + return getCacheDir() + "/nix/gitv3/" + + hashString(htSHA256, key).to_string(Base32, false); +} + +// Returns the name of the HEAD branch. +// +// Returns the head branch name as reported by git ls-remote --symref, e.g., if +// ls-remote returns the output below, "main" is returned based on the ref line. +// +// ref: refs/heads/main HEAD +// ... 
+std::optional<std::string> readHead(const Path & path) +{ + auto [exit_code, output] = runProgram(RunOptions { + .program = "git", + .args = {"ls-remote", "--symref", path}, + }); + if (exit_code != 0) { + return std::nullopt; + } + + std::string_view line = output; + line = line.substr(0, line.find("\n")); + if (const auto parseResult = git::parseLsRemoteLine(line)) { + switch (parseResult->kind) { + case git::LsRemoteRefLine::Kind::Symbolic: + debug("resolved HEAD ref '%s' for repo '%s'", parseResult->target, path); + break; + case git::LsRemoteRefLine::Kind::Object: + debug("resolved HEAD rev '%s' for repo '%s'", parseResult->target, path); + break; + } + return parseResult->target; + } + return std::nullopt; +} + +// Persist the HEAD ref from the remote repo in the local cached repo. +bool storeCachedHead(const std::string& actualUrl, const std::string& headRef) +{ + Path cacheDir = getCachePath(actualUrl); + auto gitDir = "."; + try { + runProgram("git", true, { "-C", cacheDir, "--git-dir", gitDir, "symbolic-ref", "--", "HEAD", headRef }); + } catch (ExecError &e) { + if (!WIFEXITED(e.status)) throw; + return false; + } + /* No need to touch refs/HEAD, because `git symbolic-ref` updates the mtime. */ + return true; +} + +std::optional<std::string> readHeadCached(const std::string& actualUrl) { - return chomp(runProgram("git", true, { "-C", path, "rev-parse", "--abbrev-ref", "HEAD" })); + // Create a cache path to store the branch of the HEAD ref. Append something + // in front of the URL to prevent collision with the repository itself. + Path cacheDir = getCachePath(actualUrl); + Path headRefFile = cacheDir + "/HEAD"; + + time_t now = time(0); + struct stat st; + std::optional<std::string> cachedRef; + if (stat(headRefFile.c_str(), &st) == 0) { + cachedRef = readHead(cacheDir); + if (cachedRef != std::nullopt && + *cachedRef != gitInitialBranch && + isCacheFileWithinTtl(now, st)) { + debug("using cached HEAD ref '%s' for repo '%s'", *cachedRef, actualUrl); + return cachedRef; + } + } + + auto ref = readHead(actualUrl); + if (ref) { + return ref; + } + + if (cachedRef) { + // If the cached git ref is expired in fetch() below, and the 'git fetch' + // fails, it falls back to continuing with the most recent version. + // This function must behave the same way, so we return the expired + // cached ref here. + warn("could not get HEAD ref for repository '%s'; using expired cached ref '%s'", actualUrl, *cachedRef); + return *cachedRef; + } + + return std::nullopt; +} + +bool isNotDotGitDirectory(const Path & path) +{ + return baseNameOf(path) != ".git"; +} + +struct WorkdirInfo +{ + bool clean = false; + bool hasHead = false; +}; + +// Returns whether a git workdir is clean and has commits. +WorkdirInfo getWorkdirInfo(const Input & input, const Path & workdir) +{ + const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false); + std::string gitDir(".git"); + + auto env = getEnv(); + // Set LC_ALL to C: because we rely on the error messages from git rev-parse to determine what went wrong + // that way unknown errors can lead to a failure instead of continuing through the wrong code path + env["LC_ALL"] = "C"; + + /* Check whether HEAD points to something that looks like a commit, + since that is the refrence we want to use later on. 
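+       (The rev-parse call below exits non-zero both when the directory is not
+       a git repository ("fatal: not a git repository") and when the repository
+       has no commits yet ("fatal: Needed a single revision"); its error message
+       is matched afterwards to tell these cases apart.)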
*/ + auto result = runProgram(RunOptions { + .program = "git", + .args = { "-C", workdir, "--git-dir", gitDir, "rev-parse", "--verify", "--no-revs", "HEAD^{commit}" }, + .environment = env, + .mergeStderrToStdout = true + }); + auto exitCode = WEXITSTATUS(result.first); + auto errorMessage = result.second; + + if (errorMessage.find("fatal: not a git repository") != std::string::npos) { + throw Error("'%s' is not a Git repository", workdir); + } else if (errorMessage.find("fatal: Needed a single revision") != std::string::npos) { + // indicates that the repo does not have any commits + // we want to proceed and will consider it dirty later + } else if (exitCode != 0) { + // any other errors should lead to a failure + throw Error("getting the HEAD of the Git tree '%s' failed with exit code %d:\n%s", workdir, exitCode, errorMessage); + } + + bool clean = false; + bool hasHead = exitCode == 0; + + try { + if (hasHead) { + // Using git diff is preferrable over lower-level operations here, + // because its conceptually simpler and we only need the exit code anyways. + auto gitDiffOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "diff", "HEAD", "--quiet"}); + if (!submodules) { + // Changes in submodules should only make the tree dirty + // when those submodules will be copied as well. + gitDiffOpts.emplace_back("--ignore-submodules"); + } + gitDiffOpts.emplace_back("--"); + runProgram("git", true, gitDiffOpts); + + clean = true; + } + } catch (ExecError & e) { + if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw; + } + + return WorkdirInfo { .clean = clean, .hasHead = hasHead }; } -static bool isNotDotGitDirectory(const Path & path) +std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, const Path & workdir, const WorkdirInfo & workdirInfo) { - static const std::regex gitDirRegex("^(?:.*/)?\\.git$"); + const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false); + auto gitDir = ".git"; + + if (!fetchSettings.allowDirty) + throw Error("Git tree '%s' is dirty", workdir); + + if (fetchSettings.warnDirty) + warn("Git tree '%s' is dirty", workdir); + + auto gitOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "ls-files", "-z" }); + if (submodules) + gitOpts.emplace_back("--recurse-submodules"); + + auto files = tokenizeString<std::set<std::string>>( + runProgram("git", true, gitOpts), "\0"s); + + Path actualPath(absPath(workdir)); + + PathFilter filter = [&](const Path & p) -> bool { + assert(hasPrefix(p, actualPath)); + std::string file(p, actualPath.size() + 1); + + auto st = lstat(p); + + if (S_ISDIR(st.st_mode)) { + auto prefix = file + "/"; + auto i = files.lower_bound(prefix); + return i != files.end() && hasPrefix(*i, prefix); + } - return not std::regex_match(path, gitDirRegex); + return files.count(file); + }; + + auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter); + + // FIXME: maybe we should use the timestamp of the last + // modified dirty file? + input.attrs.insert_or_assign( + "lastModified", + workdirInfo.hasHead ? 
std::stoull(runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0); + + return {std::move(storePath), input}; } +} // end namespace struct GitInputScheme : InputScheme { @@ -150,13 +367,14 @@ struct GitInputScheme : InputScheme { auto sourcePath = getSourcePath(input); assert(sourcePath); + auto gitDir = ".git"; runProgram("git", true, - { "-C", *sourcePath, "add", "--force", "--intent-to-add", "--", std::string(file) }); + { "-C", *sourcePath, "--git-dir", gitDir, "add", "--intent-to-add", "--", std::string(file) }); if (commitMsg) runProgram("git", true, - { "-C", *sourcePath, "commit", std::string(file), "-m", *commitMsg }); + { "-C", *sourcePath, "--git-dir", gitDir, "commit", std::string(file), "-m", *commitMsg }); } std::pair<bool, std::string> getActualUrl(const Input & input) const @@ -175,6 +393,7 @@ struct GitInputScheme : InputScheme std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override { Input input(_input); + auto gitDir = ".git"; std::string name = input.getName(); @@ -187,8 +406,16 @@ struct GitInputScheme : InputScheme if (submodules) cacheType += "-submodules"; if (allRefs) cacheType += "-all-refs"; + auto checkHashType = [&](const std::optional<Hash> & hash) + { + if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256)) + throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(Base16, true)); + }; + auto getLockedAttrs = [&]() { + checkHashType(input.getRev()); + return Attrs({ {"type", cacheType}, {"name", name}, @@ -215,97 +442,54 @@ struct GitInputScheme : InputScheme auto [isLocal, actualUrl_] = getActualUrl(input); auto actualUrl = actualUrl_; // work around clang bug - // If this is a local directory and no ref or revision is - // given, then allow the use of an unclean working tree. + /* If this is a local directory and no ref or revision is given, + allow fetching directly from a dirty workdir. */ if (!input.getRef() && !input.getRev() && isLocal) { - bool clean = false; - - /* Check whether this repo has any commits. There are - probably better ways to do this. */ - auto gitDir = actualUrl + "/.git"; - auto commonGitDir = chomp(runProgram( - "git", - true, - { "-C", actualUrl, "rev-parse", "--git-common-dir" } - )); - if (commonGitDir != ".git") - gitDir = commonGitDir; - - bool haveCommits = !readDirectory(gitDir + "/refs/heads").empty(); - - try { - if (haveCommits) { - runProgram("git", true, { "-C", actualUrl, "diff-index", "--quiet", "HEAD", "--" }); - clean = true; - } - } catch (ExecError & e) { - if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw; - } - - if (!clean) { - - /* This is an unclean working tree. So copy all tracked files. 
*/ - - if (!settings.allowDirty) - throw Error("Git tree '%s' is dirty", actualUrl); - - if (settings.warnDirty) - warn("Git tree '%s' is dirty", actualUrl); - - auto gitOpts = Strings({ "-C", actualUrl, "ls-files", "-z" }); - if (submodules) - gitOpts.emplace_back("--recurse-submodules"); - - auto files = tokenizeString<std::set<std::string>>( - runProgram("git", true, gitOpts), "\0"s); - - PathFilter filter = [&](const Path & p) -> bool { - assert(hasPrefix(p, actualUrl)); - std::string file(p, actualUrl.size() + 1); - - auto st = lstat(p); - - if (S_ISDIR(st.st_mode)) { - auto prefix = file + "/"; - auto i = files.lower_bound(prefix); - return i != files.end() && hasPrefix(*i, prefix); - } - - return files.count(file); - }; - - auto storePath = store->addToStore(input.getName(), actualUrl, FileIngestionMethod::Recursive, htSHA256, filter); - - // FIXME: maybe we should use the timestamp of the last - // modified dirty file? - input.attrs.insert_or_assign( - "lastModified", - haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0); - - return {std::move(storePath), input}; + auto workdirInfo = getWorkdirInfo(input, actualUrl); + if (!workdirInfo.clean) { + return fetchFromWorkdir(store, input, actualUrl, workdirInfo); } } - if (!input.getRef()) input.attrs.insert_or_assign("ref", isLocal ? readHead(actualUrl) : "master"); - Attrs unlockedAttrs({ {"type", cacheType}, {"name", name}, {"url", actualUrl}, - {"ref", *input.getRef()}, }); Path repoDir; if (isLocal) { + if (!input.getRef()) { + auto head = readHead(actualUrl); + if (!head) { + warn("could not read HEAD ref from repo at '%s', using 'master'", actualUrl); + head = "master"; + } + input.attrs.insert_or_assign("ref", *head); + unlockedAttrs.insert_or_assign("ref", *head); + } if (!input.getRev()) input.attrs.insert_or_assign("rev", - Hash::parseAny(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input.getRef() })), htSHA1).gitRev()); + Hash::parseAny(chomp(runProgram("git", true, { "-C", actualUrl, "--git-dir", gitDir, "rev-parse", *input.getRef() })), htSHA1).gitRev()); repoDir = actualUrl; - } else { + const bool useHeadRef = !input.getRef(); + if (useHeadRef) { + auto head = readHeadCached(actualUrl); + if (!head) { + warn("could not read HEAD ref from repo at '%s', using 'master'", actualUrl); + head = "master"; + } + input.attrs.insert_or_assign("ref", *head); + unlockedAttrs.insert_or_assign("ref", *head); + } else { + if (!input.getRev()) { + unlockedAttrs.insert_or_assign("ref", input.getRef().value()); + } + } if (auto res = getCache()->lookup(store, unlockedAttrs)) { auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1); @@ -315,8 +499,9 @@ struct GitInputScheme : InputScheme } } - Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, actualUrl).to_string(Base32, false); + Path cacheDir = getCachePath(actualUrl); repoDir = cacheDir; + gitDir = "."; createDirs(dirOf(cacheDir)); PathLocks cacheDirLock({cacheDir + ".lock"}); @@ -337,7 +522,7 @@ struct GitInputScheme : InputScheme repo. */ if (input.getRev()) { try { - runProgram("git", true, { "-C", repoDir, "cat-file", "-e", input.getRev()->gitRev() }); + runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "cat-file", "-e", input.getRev()->gitRev() }); doFetch = false; } catch (ExecError & e) { if (WIFEXITED(e.status)) { @@ -354,7 +539,7 @@ struct GitInputScheme : InputScheme git fetch to update the local ref to the remote ref. 
*/ struct stat st; doFetch = stat(localRefFile.c_str(), &st) != 0 || - (uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now; + !isCacheFileWithinTtl(now, st); } } @@ -372,19 +557,16 @@ struct GitInputScheme : InputScheme : ref == "HEAD" ? *ref : "refs/heads/" + *ref; - runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) }); + runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) }); } catch (Error & e) { if (!pathExists(localRefFile)) throw; warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl); } - struct timeval times[2]; - times[0].tv_sec = now; - times[0].tv_usec = 0; - times[1].tv_sec = now; - times[1].tv_usec = 0; - - utimes(localRefFile.c_str(), times); + if (!touchCacheFile(localRefFile, now)) + warn("could not update mtime for file '%s': %s", localRefFile, strerror(errno)); + if (useHeadRef && !storeCachedHead(actualUrl, *input.getRef())) + warn("could not update cached head '%s' for '%s'", *input.getRef(), actualUrl); } if (!input.getRev()) @@ -393,10 +575,10 @@ struct GitInputScheme : InputScheme // cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder } - bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "rev-parse", "--is-shallow-repository" })) == "true"; + bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-parse", "--is-shallow-repository" })) == "true"; if (isShallow && !shallow) - throw Error("'%s' is a shallow Git repository, but a non-shallow repository is needed", actualUrl); + throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified.", actualUrl); // FIXME: check whether rev is an ancestor of ref. 
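The hunk above swaps the open-coded mtime/TTL arithmetic and the utimes() call for small helpers (isCacheFileWithinTtl, touchCacheFile, storeCachedHead) whose definitions are not part of this excerpt. A minimal sketch of what such helpers could look like, assuming a TTL in seconds comparable to settings.tarballTtl (the exact signatures here are assumptions, not the real definitions):

    #include <sys/stat.h>
    #include <sys/time.h>
    #include <ctime>
    #include <string>

    // Assumed TTL in seconds; the real code derives this from settings.tarballTtl.
    static const time_t cacheTtl = 3600;

    // A cached ref file is still usable if its mtime plus the TTL has not yet passed.
    static bool isCacheFileWithinTtl(time_t now, const struct stat & st)
    {
        return st.st_mtime + cacheTtl > now;
    }

    // Refresh the mtime of a cache file; return false instead of throwing so the
    // caller can merely warn (e.g. when the cache lives on a read-only file system).
    static bool touchCacheFile(const std::string & path, time_t now)
    {
        struct timeval times[2];
        times[0].tv_sec = now; times[0].tv_usec = 0;
        times[1].tv_sec = now; times[1].tv_usec = 0;
        return lutimes(path.c_str(), times) == 0;
    }
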
@@ -413,7 +595,7 @@ struct GitInputScheme : InputScheme auto result = runProgram(RunOptions { .program = "git", - .args = { "-C", repoDir, "cat-file", "commit", input.getRev()->gitRev() }, + .args = { "-C", repoDir, "--git-dir", gitDir, "cat-file", "commit", input.getRev()->gitRev() }, .mergeStderrToStdout = true }); if (WEXITSTATUS(result.first) == 128 @@ -452,7 +634,7 @@ struct GitInputScheme : InputScheme auto source = sinkToSource([&](Sink & sink) { runProgram2({ .program = "git", - .args = { "-C", repoDir, "archive", input.getRev()->gitRev() }, + .args = { "-C", repoDir, "--git-dir", gitDir, "archive", input.getRev()->gitRev() }, .standardOut = &sink }); }); @@ -462,7 +644,7 @@ struct GitInputScheme : InputScheme auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter); - auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", "--no-show-signature", input.getRev()->gitRev() })); + auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", input.getRev()->gitRev() })); Attrs infoAttrs({ {"rev", input.getRev()->gitRev()}, @@ -471,7 +653,7 @@ struct GitInputScheme : InputScheme if (!shallow) infoAttrs.insert_or_assign("revCount", - std::stoull(runProgram("git", true, { "-C", repoDir, "rev-list", "--count", input.getRev()->gitRev() }))); + std::stoull(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-list", "--count", input.getRev()->gitRev() }))); if (!_input.getRev()) getCache()->add( diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 70622bf79..2115ce2f5 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -1,10 +1,12 @@ #include "filetransfer.hh" #include "cache.hh" -#include "fetchers.hh" #include "globals.hh" #include "store-api.hh" #include "types.hh" #include "url-parts.hh" +#include "git.hh" +#include "fetchers.hh" +#include "fetch-settings.hh" #include <optional> #include <nlohmann/json.hpp> @@ -157,7 +159,7 @@ struct GitArchiveInputScheme : InputScheme std::optional<std::string> getAccessToken(const std::string & host) const { - auto tokens = settings.accessTokens.get(); + auto tokens = fetchSettings.accessTokens.get(); if (auto token = get(tokens, host)) return *token; return {}; @@ -241,7 +243,10 @@ struct GitHubInputScheme : GitArchiveInputScheme Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override { auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com"); - auto url = fmt("https://api.%s/repos/%s/%s/commits/%s", // FIXME: check + auto url = fmt( + host == "github.com" + ? "https://api.%s/repos/%s/%s/commits/%s" + : "https://%s/api/v3/repos/%s/%s/commits/%s", host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef()); Headers headers = makeHeadersWithAuthTokens(host); @@ -257,14 +262,20 @@ struct GitHubInputScheme : GitArchiveInputScheme DownloadUrl getDownloadUrl(const Input & input) const override { - // FIXME: use regular /archive URLs instead? api.github.com - // might have stricter rate limits. 
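        // For illustration only (owner/repo and the Enterprise host below are
        // placeholders, not values taken from this change): the selection logic
        // that follows yields URLs of roughly these shapes:
        //   * GitHub Enterprise host:  https://ghe.example.org/api/v3/repos/owner/repo/tarball/<rev>
        //   * github.com, no token:    https://github.com/owner/repo/archive/<rev>.tar.gz
        //   * github.com, with token:  https://api.github.com/repos/owner/repo/tarball/<rev>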
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com"); - auto url = fmt("https://api.%s/repos/%s/%s/tarball/%s", // FIXME: check if this is correct for self hosted instances - host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), + Headers headers = makeHeadersWithAuthTokens(host); + // If we have no auth headers then we default to the public archive + // urls so we do not run into rate limits. + const auto urlFmt = + host != "github.com" + ? "https://%s/api/v3/repos/%s/%s/tarball/%s" + : headers.empty() + ? "https://%s/%s/%s/archive/%s.tar.gz" + : "https://api.%s/repos/%s/%s/tarball/%s"; + + const auto url = fmt(urlFmt, host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), input.getRev()->to_string(Base16, false)); - Headers headers = makeHeadersWithAuthTokens(host); return DownloadUrl { url, headers }; } @@ -373,7 +384,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme Headers headers = makeHeadersWithAuthTokens(host); - std::string ref_uri; + std::string refUri; if (ref == "HEAD") { auto file = store->toRealPath( downloadFile(store, fmt("%s/HEAD", base_url), "source", false, headers).storePath); @@ -381,33 +392,32 @@ struct SourceHutInputScheme : GitArchiveInputScheme std::string line; getline(is, line); - auto ref_index = line.find("ref: "); - if (ref_index == std::string::npos) { + auto remoteLine = git::parseLsRemoteLine(line); + if (!remoteLine) { throw BadURL("in '%d', couldn't resolve HEAD ref '%d'", input.to_string(), ref); } - - ref_uri = line.substr(ref_index+5, line.length()-1); - } else - ref_uri = fmt("refs/heads/%s", ref); + refUri = remoteLine->target; + } else { + refUri = fmt("refs/(heads|tags)/%s", ref); + } + std::regex refRegex(refUri); auto file = store->toRealPath( downloadFile(store, fmt("%s/info/refs", base_url), "source", false, headers).storePath); std::ifstream is(file); std::string line; - std::string id; - while(getline(is, line)) { - auto index = line.find(ref_uri); - if (index != std::string::npos) { - id = line.substr(0, index-1); - break; - } + std::optional<std::string> id; + while(!id && getline(is, line)) { + auto parsedLine = git::parseLsRemoteLine(line); + if (parsedLine && parsedLine->reference && std::regex_match(*parsedLine->reference, refRegex)) + id = parsedLine->target; } - if(id.empty()) + if(!id) throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref); - auto rev = Hash::parseAny(id, htSHA1); + auto rev = Hash::parseAny(*id, htSHA1); debug("HEAD revision for '%s' is %s", fmt("%s/%s", base_url, ref), rev.gitRev()); return rev; } diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc index 12cdecbc1..5c5671681 100644 --- a/src/libfetchers/mercurial.cc +++ b/src/libfetchers/mercurial.cc @@ -5,6 +5,8 @@ #include "store-api.hh" #include "url-parts.hh" +#include "fetch-settings.hh" + #include <sys/time.h> using namespace std::string_literals; @@ -34,7 +36,7 @@ static std::string runHg(const Strings & args, const std::optional<std::string> auto res = runProgram(std::move(opts)); if (!statusOk(res.first)) - throw ExecError(res.first, fmt("hg %1%", statusToString(res.first))); + throw ExecError(res.first, "hg %1%", statusToString(res.first)); return res.second; } @@ -165,10 +167,10 @@ struct MercurialInputScheme : InputScheme /* This is an unclean working tree. So copy all tracked files. 
*/ - if (!settings.allowDirty) + if (!fetchSettings.allowDirty) throw Error("Mercurial tree '%s' is unclean", actualUrl); - if (settings.warnDirty) + if (fetchSettings.warnDirty) warn("Mercurial tree '%s' is unclean", actualUrl); input.attrs.insert_or_assign("ref", chomp(runHg({ "branch", "-R", actualUrl }))); @@ -176,9 +178,11 @@ struct MercurialInputScheme : InputScheme auto files = tokenizeString<std::set<std::string>>( runHg({ "status", "-R", actualUrl, "--clean", "--modified", "--added", "--no-status", "--print0" }), "\0"s); + Path actualPath(absPath(actualUrl)); + PathFilter filter = [&](const Path & p) -> bool { - assert(hasPrefix(p, actualUrl)); - std::string file(p, actualUrl.size() + 1); + assert(hasPrefix(p, actualPath)); + std::string file(p, actualPath.size() + 1); auto st = lstat(p); @@ -191,7 +195,7 @@ struct MercurialInputScheme : InputScheme return files.count(file); }; - auto storePath = store->addToStore(input.getName(), actualUrl, FileIngestionMethod::Recursive, htSHA256, filter); + auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter); return {std::move(storePath), input}; } @@ -199,8 +203,17 @@ struct MercurialInputScheme : InputScheme if (!input.getRef()) input.attrs.insert_or_assign("ref", "default"); + auto checkHashType = [&](const std::optional<Hash> & hash) + { + if (hash.has_value() && hash->type != htSHA1) + throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(Base16, true)); + }; + + auto getLockedAttrs = [&]() { + checkHashType(input.getRev()); + return Attrs({ {"type", "hg"}, {"name", name}, @@ -260,7 +273,7 @@ struct MercurialInputScheme : InputScheme runHg({ "recover", "-R", cacheDir }); runHg({ "pull", "-R", cacheDir, "--", actualUrl }); } else { - throw ExecError(e.status, fmt("'hg pull' %s", statusToString(e.status))); + throw ExecError(e.status, "'hg pull' %s", statusToString(e.status)); } } } else { diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index 59e228e97..f0ef97da5 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -1,5 +1,6 @@ #include "fetchers.hh" #include "store-api.hh" +#include "archive.hh" namespace nix::fetchers { @@ -80,8 +81,9 @@ struct PathInputScheme : InputScheme // nothing to do } - std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override + std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override { + Input input(_input); std::string absPath; auto path = getStrAttr(input.attrs, "path"); @@ -111,9 +113,15 @@ struct PathInputScheme : InputScheme if (storePath) store->addTempRoot(*storePath); - if (!storePath || storePath->name() != "source" || !store->isValidPath(*storePath)) + time_t mtime = 0; + if (!storePath || storePath->name() != "source" || !store->isValidPath(*storePath)) { // FIXME: try to substitute storePath. 
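            // The replacement below streams the path as a NAR via sinkToSource and
            // records its mtime in the same pass (dumpPathAndGetMtime), so the
            // resulting input can carry a "lastModified" attribute without a
            // second traversal of the tree.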
- storePath = store->addToStore("source", absPath); + auto src = sinkToSource([&](Sink & sink) { + mtime = dumpPathAndGetMtime(absPath, sink, defaultPathFilter); + }); + storePath = store->addToStoreFromDump(*src, "source"); + } + input.attrs.insert_or_assign("lastModified", uint64_t(mtime)); return {std::move(*storePath), input}; } diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc index f35359d4b..acd1ff866 100644 --- a/src/libfetchers/registry.cc +++ b/src/libfetchers/registry.cc @@ -5,6 +5,8 @@ #include "store-api.hh" #include "local-fs-store.hh" +#include "fetch-settings.hh" + #include <nlohmann/json.hpp> namespace nix::fetchers { @@ -150,7 +152,7 @@ void overrideRegistry( static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store) { static auto reg = [&]() { - auto path = settings.flakeRegistry.get(); + auto path = fetchSettings.flakeRegistry.get(); if (!hasPrefix(path, "/")) { auto storePath = downloadFile(store, path, "flake-registry.json", false).storePath; diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index dde0ad761..6c551bd93 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -6,6 +6,7 @@ #include "archive.hh" #include "tarfile.hh" #include "types.hh" +#include "split.hh" namespace nix::fetchers { @@ -168,24 +169,34 @@ std::pair<Tree, time_t> downloadTarball( }; } -struct TarballInputScheme : InputScheme +// An input scheme corresponding to a curl-downloadable resource. +struct CurlInputScheme : InputScheme { - std::optional<Input> inputFromURL(const ParsedURL & url) override + virtual const std::string inputType() const = 0; + const std::set<std::string> transportUrlSchemes = {"file", "http", "https"}; + + const bool hasTarballExtension(std::string_view path) const { - if (url.scheme != "file" && url.scheme != "http" && url.scheme != "https") return {}; + return hasSuffix(path, ".zip") || hasSuffix(path, ".tar") + || hasSuffix(path, ".tgz") || hasSuffix(path, ".tar.gz") + || hasSuffix(path, ".tar.xz") || hasSuffix(path, ".tar.bz2") + || hasSuffix(path, ".tar.zst"); + } - if (!hasSuffix(url.path, ".zip") - && !hasSuffix(url.path, ".tar") - && !hasSuffix(url.path, ".tgz") - && !hasSuffix(url.path, ".tar.gz") - && !hasSuffix(url.path, ".tar.xz") - && !hasSuffix(url.path, ".tar.bz2") - && !hasSuffix(url.path, ".tar.zst")) - return {}; + virtual bool isValidURL(const ParsedURL & url) const = 0; + + std::optional<Input> inputFromURL(const ParsedURL & url) override + { + if (!isValidURL(url)) + return std::nullopt; Input input; - input.attrs.insert_or_assign("type", "tarball"); - input.attrs.insert_or_assign("url", url.to_string()); + + auto urlWithoutApplicationScheme = url; + urlWithoutApplicationScheme.scheme = parseUrlScheme(url.scheme).transport; + + input.attrs.insert_or_assign("type", inputType()); + input.attrs.insert_or_assign("url", urlWithoutApplicationScheme.to_string()); auto narHash = url.query.find("narHash"); if (narHash != url.query.end()) input.attrs.insert_or_assign("narHash", narHash->second); @@ -194,14 +205,17 @@ struct TarballInputScheme : InputScheme std::optional<Input> inputFromAttrs(const Attrs & attrs) override { - if (maybeGetStrAttr(attrs, "type") != "tarball") return {}; + auto type = maybeGetStrAttr(attrs, "type"); + if (type != inputType()) return {}; + std::set<std::string> allowedNames = {"type", "url", "narHash", "name", "unpack"}; for (auto & [name, value] : attrs) - if (name != "type" && name != "url" && /* name != "hash" && */ name != "narHash" && name != "name") - 
throw Error("unsupported tarball input attribute '%s'", name); + if (!allowedNames.count(name)) + throw Error("unsupported %s input attribute '%s'", *type, name); Input input; input.attrs = attrs; + //input.locked = (bool) maybeGetStrAttr(input.attrs, "hash"); return input; } @@ -209,14 +223,9 @@ struct TarballInputScheme : InputScheme ParsedURL toURL(const Input & input) override { auto url = parseURL(getStrAttr(input.attrs, "url")); - // NAR hashes are preferred over file hashes since tar/zip files - // don't have a canonical representation. + // NAR hashes are preferred over file hashes since tar/zip files // don't have a canonical representation. if (auto narHash = input.getNarHash()) url.query.insert_or_assign("narHash", narHash->to_string(SRI, true)); - /* - else if (auto hash = maybeGetStrAttr(input.attrs, "hash")) - url.query.insert_or_assign("hash", Hash(*hash).to_string(SRI, true)); - */ return url; } @@ -225,6 +234,42 @@ struct TarballInputScheme : InputScheme return true; } +}; + +struct FileInputScheme : CurlInputScheme +{ + const std::string inputType() const override { return "file"; } + + bool isValidURL(const ParsedURL & url) const override + { + auto parsedUrlScheme = parseUrlScheme(url.scheme); + return transportUrlSchemes.count(std::string(parsedUrlScheme.transport)) + && (parsedUrlScheme.application + ? parsedUrlScheme.application.value() == inputType() + : !hasTarballExtension(url.path)); + } + + std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override + { + auto file = downloadFile(store, getStrAttr(input.attrs, "url"), input.getName(), false); + return {std::move(file.storePath), input}; + } +}; + +struct TarballInputScheme : CurlInputScheme +{ + const std::string inputType() const override { return "tarball"; } + + bool isValidURL(const ParsedURL & url) const override + { + auto parsedUrlScheme = parseUrlScheme(url.scheme); + + return transportUrlSchemes.count(std::string(parsedUrlScheme.transport)) + && (parsedUrlScheme.application + ? 
parsedUrlScheme.application.value() == inputType() + : hasTarballExtension(url.path)); + } + std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override { auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), input.getName(), false).first; @@ -233,5 +278,6 @@ struct TarballInputScheme : InputScheme }; static auto rTarballInputScheme = OnStartup([] { registerInputScheme(std::make_unique<TarballInputScheme>()); }); +static auto rFileInputScheme = OnStartup([] { registerInputScheme(std::make_unique<FileInputScheme>()); }); } diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc index 12f5403ea..f92920d18 100644 --- a/src/libmain/common-args.cc +++ b/src/libmain/common-args.cc @@ -32,6 +32,7 @@ MixCommonArgs::MixCommonArgs(const std::string & programName) addFlag({ .longName = "option", .description = "Set the Nix configuration setting *name* to *value* (overriding `nix.conf`).", + .category = miscCategory, .labels = {"name", "value"}, .handler = {[](std::string name, std::string value) { try { diff --git a/src/libmain/common-args.hh b/src/libmain/common-args.hh index 25453b8c6..f180d83ce 100644 --- a/src/libmain/common-args.hh +++ b/src/libmain/common-args.hh @@ -6,6 +6,7 @@ namespace nix { //static constexpr auto commonArgsCategory = "Miscellaneous common options"; static constexpr auto loggingCategory = "Logging-related options"; +static constexpr auto miscCategory = "Miscellaneous global options"; class MixCommonArgs : public virtual Args { diff --git a/src/libmain/loggers.cc b/src/libmain/loggers.cc index cdf23859b..cda5cb939 100644 --- a/src/libmain/loggers.cc +++ b/src/libmain/loggers.cc @@ -30,8 +30,11 @@ Logger * makeDefaultLogger() { return makeJSONLogger(*makeSimpleLogger(true)); case LogFormat::bar: return makeProgressBar(); - case LogFormat::barWithLogs: - return makeProgressBar(true); + case LogFormat::barWithLogs: { + auto logger = makeProgressBar(); + logger->setPrintBuildLogs(true); + return logger; + } default: abort(); } diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc index f4306ab91..961f4e18a 100644 --- a/src/libmain/progress-bar.cc +++ b/src/libmain/progress-bar.cc @@ -8,6 +8,7 @@ #include <map> #include <thread> #include <iostream> +#include <chrono> namespace nix { @@ -48,6 +49,7 @@ private: bool visible = true; ActivityId parent; std::optional<std::string> name; + std::chrono::time_point<std::chrono::steady_clock> startTime; }; struct ActivitiesByType @@ -79,22 +81,22 @@ private: std::condition_variable quitCV, updateCV; - bool printBuildLogs; + bool printBuildLogs = false; bool isTTY; public: - ProgressBar(bool printBuildLogs, bool isTTY) - : printBuildLogs(printBuildLogs) - , isTTY(isTTY) + ProgressBar(bool isTTY) + : isTTY(isTTY) { state_.lock()->active = isTTY; updateThread = std::thread([&]() { auto state(state_.lock()); + auto nextWakeup = std::chrono::milliseconds::max(); while (state->active) { if (!state->haveUpdate) - state.wait(updateCV); - draw(*state); + state.wait_for(updateCV, nextWakeup); + nextWakeup = draw(*state); state.wait_for(quitCV, std::chrono::milliseconds(50)); } }); @@ -118,7 +120,8 @@ public: updateThread.join(); } - bool isVerbose() override { + bool isVerbose() override + { return printBuildLogs; } @@ -159,11 +162,13 @@ public: if (lvl <= verbosity && !s.empty() && type != actBuildWaiting) log(*state, lvl, s + "..."); - state->activities.emplace_back(ActInfo()); + state->activities.emplace_back(ActInfo { + .s = s, + .type = type, + .parent = parent, + .startTime = 
std::chrono::steady_clock::now() + }); auto i = std::prev(state->activities.end()); - i->s = s; - i->type = type; - i->parent = parent; state->its.emplace(act, i); state->activitiesByType[type].its.emplace(act, i); @@ -327,10 +332,12 @@ public: updateCV.notify_one(); } - void draw(State & state) + std::chrono::milliseconds draw(State & state) { + auto nextWakeup = std::chrono::milliseconds::max(); + state.haveUpdate = false; - if (!state.active) return; + if (!state.active) return nextWakeup; std::string line; @@ -341,12 +348,25 @@ public: line += "]"; } + auto now = std::chrono::steady_clock::now(); + if (!state.activities.empty()) { if (!status.empty()) line += " "; auto i = state.activities.rbegin(); - while (i != state.activities.rend() && (!i->visible || (i->s.empty() && i->lastLine.empty()))) + while (i != state.activities.rend()) { + if (i->visible && (!i->s.empty() || !i->lastLine.empty())) { + /* Don't show activities until some time has + passed, to avoid displaying very short + activities. */ + auto delay = std::chrono::milliseconds(10); + if (i->startTime + delay < now) + break; + else + nextWakeup = std::min(nextWakeup, std::chrono::duration_cast<std::chrono::milliseconds>(delay - (now - i->startTime))); + } ++i; + } if (i != state.activities.rend()) { line += i->s; @@ -366,6 +386,8 @@ public: if (width <= 0) width = std::numeric_limits<decltype(width)>::max(); writeToStderr("\r" + filterANSIEscapes(line, false, width) + ANSI_NORMAL + "\e[K"); + + return nextWakeup; } std::string getStatus(State & state) @@ -480,19 +502,21 @@ public: draw(*state); return s[0]; } + + void setPrintBuildLogs(bool printBuildLogs) override + { + this->printBuildLogs = printBuildLogs; + } }; -Logger * makeProgressBar(bool printBuildLogs) +Logger * makeProgressBar() { - return new ProgressBar( - printBuildLogs, - shouldANSI() - ); + return new ProgressBar(shouldANSI()); } -void startProgressBar(bool printBuildLogs) +void startProgressBar() { - logger = makeProgressBar(printBuildLogs); + logger = makeProgressBar(); } void stopProgressBar() diff --git a/src/libmain/progress-bar.hh b/src/libmain/progress-bar.hh index 7f0dafecf..3a76f8448 100644 --- a/src/libmain/progress-bar.hh +++ b/src/libmain/progress-bar.hh @@ -4,9 +4,9 @@ namespace nix { -Logger * makeProgressBar(bool printBuildLogs = false); +Logger * makeProgressBar(); -void startProgressBar(bool printBuildLogs = false); +void startProgressBar(); void stopProgressBar(); diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 47fc7ab2e..a58428762 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -1,8 +1,10 @@ #include "globals.hh" #include "shared.hh" #include "store-api.hh" +#include "gc-store.hh" #include "util.hh" #include "loggers.hh" +#include "progress-bar.hh" #include <algorithm> #include <cctype> @@ -31,6 +33,7 @@ namespace nix { +char * * savedArgv; static bool gcWarning = true; @@ -59,37 +62,37 @@ void printMissing(ref<Store> store, const StorePathSet & willBuild, { if (!willBuild.empty()) { if (willBuild.size() == 1) - printMsg(lvl, fmt("this derivation will be built:")); + printMsg(lvl, "this derivation will be built:"); else - printMsg(lvl, fmt("these %d derivations will be built:", willBuild.size())); + printMsg(lvl, "these %d derivations will be built:", willBuild.size()); auto sorted = store->topoSortPaths(willBuild); reverse(sorted.begin(), sorted.end()); for (auto & i : sorted) - printMsg(lvl, fmt(" %s", store->printStorePath(i))); + printMsg(lvl, " %s", store->printStorePath(i)); } if 
(!willSubstitute.empty()) { const float downloadSizeMiB = downloadSize / (1024.f * 1024.f); const float narSizeMiB = narSize / (1024.f * 1024.f); if (willSubstitute.size() == 1) { - printMsg(lvl, fmt("this path will be fetched (%.2f MiB download, %.2f MiB unpacked):", + printMsg(lvl, "this path will be fetched (%.2f MiB download, %.2f MiB unpacked):", downloadSizeMiB, - narSizeMiB)); + narSizeMiB); } else { - printMsg(lvl, fmt("these %d paths will be fetched (%.2f MiB download, %.2f MiB unpacked):", + printMsg(lvl, "these %d paths will be fetched (%.2f MiB download, %.2f MiB unpacked):", willSubstitute.size(), downloadSizeMiB, - narSizeMiB)); + narSizeMiB); } for (auto & i : willSubstitute) - printMsg(lvl, fmt(" %s", store->printStorePath(i))); + printMsg(lvl, " %s", store->printStorePath(i)); } if (!unknown.empty()) { - printMsg(lvl, fmt("don't know how to build these paths%s:", - (settings.readOnlyMode ? " (may be caused by read-only store access)" : ""))); + printMsg(lvl, "don't know how to build these paths%s:", + (settings.readOnlyMode ? " (may be caused by read-only store access)" : "")); for (auto & i : unknown) - printMsg(lvl, fmt(" %s", store->printStorePath(i))); + printMsg(lvl, " %s", store->printStorePath(i)); } } @@ -180,8 +183,9 @@ void initNix() /* Reset SIGCHLD to its default. */ struct sigaction act; sigemptyset(&act.sa_mask); - act.sa_handler = SIG_DFL; act.sa_flags = 0; + + act.sa_handler = SIG_DFL; if (sigaction(SIGCHLD, &act, 0)) throw SysError("resetting SIGCHLD"); @@ -193,9 +197,20 @@ void initNix() /* HACK: on darwin, we need can’t use sigprocmask with SIGWINCH. * Instead, add a dummy sigaction handler, and signalHandlerThread * can handle the rest. */ - struct sigaction sa; - sa.sa_handler = sigHandler; - if (sigaction(SIGWINCH, &sa, 0)) throw SysError("handling SIGWINCH"); + act.sa_handler = sigHandler; + if (sigaction(SIGWINCH, &act, 0)) throw SysError("handling SIGWINCH"); + + /* Disable SA_RESTART for interrupts, so that system calls on this thread + * error with EINTR like they do on Linux. + * Most signals on BSD systems default to SA_RESTART on, but Nix + * expects EINTR from syscalls to properly exit. */ + act.sa_handler = SIG_DFL; + if (sigaction(SIGINT, &act, 0)) throw SysError("handling SIGINT"); + if (sigaction(SIGTERM, &act, 0)) throw SysError("handling SIGTERM"); + if (sigaction(SIGHUP, &act, 0)) throw SysError("handling SIGHUP"); + if (sigaction(SIGPIPE, &act, 0)) throw SysError("handling SIGPIPE"); + if (sigaction(SIGQUIT, &act, 0)) throw SysError("handling SIGQUIT"); + if (sigaction(SIGTRAP, &act, 0)) throw SysError("handling SIGTRAP"); #endif /* Register a SIGSEGV handler to detect stack overflows. */ @@ -227,8 +242,6 @@ LegacyArgs::LegacyArgs(const std::string & programName, std::function<bool(Strings::iterator & arg, const Strings::iterator & end)> parseArg) : MixCommonArgs(programName), parseArg(parseArg) { - printError("FOO %s", programName); - addFlag({ .longName = "no-build-output", .shortName = 'Q', @@ -411,6 +424,8 @@ RunPager::RunPager() if (!pager) pager = getenv("PAGER"); if (pager && ((std::string) pager == "" || (std::string) pager == "cat")) return; + stopProgressBar(); + Pipe toPager; toPager.create(); diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh index 0cc56d47d..3c37fd627 100644 --- a/src/libmain/shared.hh +++ b/src/libmain/shared.hh @@ -113,5 +113,25 @@ struct PrintFreed /* Install a SIGSEGV handler to detect stack overflows. */ void detectStackOverflow(); +/* Pluggable behavior to run in case of a stack overflow. 
+ + Default value: defaultStackOverflowHandler. + + This is called by the handler installed by detectStackOverflow(). + + This gives Nix library consumers a limit opportunity to report the error + condition. The handler should exit the process. + See defaultStackOverflowHandler() for a reference implementation. + + NOTE: Use with diligence, because this runs in the signal handler, with very + limited stack space and a potentially a corrupted heap, all while the failed + thread is blocked indefinitely. All functions called must be reentrant. */ +extern std::function<void(siginfo_t * info, void * ctx)> stackOverflowHandler; + +/* The default, robust implementation of stackOverflowHandler. + + Prints an error message directly to stderr using a syscall instead of the + logger. Exits the process immediately after. */ +void defaultStackOverflowHandler(siginfo_t * info, void * ctx); } diff --git a/src/libmain/stack.cc b/src/libmain/stack.cc index b0a4a4c5d..10f71c1dc 100644 --- a/src/libmain/stack.cc +++ b/src/libmain/stack.cc @@ -1,4 +1,5 @@ #include "error.hh" +#include "shared.hh" #include <cstring> #include <cstddef> @@ -29,9 +30,7 @@ static void sigsegvHandler(int signo, siginfo_t * info, void * ctx) ptrdiff_t diff = (char *) info->si_addr - sp; if (diff < 0) diff = -diff; if (diff < 4096) { - char msg[] = "error: stack overflow (possible infinite recursion)\n"; - [[gnu::unused]] auto res = write(2, msg, strlen(msg)); - _exit(1); // maybe abort instead? + nix::stackOverflowHandler(info, ctx); } } @@ -67,5 +66,12 @@ void detectStackOverflow() #endif } +std::function<void(siginfo_t * info, void * ctx)> stackOverflowHandler(defaultStackOverflowHandler); + +void defaultStackOverflowHandler(siginfo_t * info, void * ctx) { + char msg[] = "error: stack overflow (possible infinite recursion)\n"; + [[gnu::unused]] auto res = write(2, msg, strlen(msg)); + _exit(1); // maybe abort instead? 
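/* Sketch, not part of this change: a library consumer that wants custom
   behaviour could override the hook before calling into Nix, keeping the
   handler async-signal-safe and exiting the process, e.g.:

       #include "shared.hh"
       #include <unistd.h>

       static void myHandler(siginfo_t * info, void * ctx)
       {
           static const char msg[] = "embedding app: Nix evaluator overflowed its stack\n";
           (void) write(2, msg, sizeof(msg) - 1);  // write() is reentrant
           _exit(70);                              // handler must exit the process
       }

       // During start-up, before evaluation begins:
       nix::stackOverflowHandler = myHandler;
*/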
+} } diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 9226c4e19..a26770c79 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -331,6 +331,17 @@ bool BinaryCacheStore::isValidPathUncached(const StorePath & storePath) return fileExists(narInfoFileFor(storePath)); } +std::optional<StorePath> BinaryCacheStore::queryPathFromHashPart(const std::string & hashPart) +{ + auto pseudoPath = StorePath(hashPart + "-" + MissingName); + try { + auto info = queryPathInfo(pseudoPath); + return info->path; + } catch (InvalidPath &) { + return std::nullopt; + } +} + void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink) { auto info = queryPathInfo(storePath).cast<const NarInfo>(); diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index 9603a8caa..8c82e2387 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -2,6 +2,7 @@ #include "crypto.hh" #include "store-api.hh" +#include "log-store.hh" #include "pool.hh" @@ -28,7 +29,9 @@ struct BinaryCacheStoreConfig : virtual StoreConfig "other than -1 which we reserve to indicate Nix defaults should be used"}; }; -class BinaryCacheStore : public virtual BinaryCacheStoreConfig, public virtual Store +class BinaryCacheStore : public virtual BinaryCacheStoreConfig, + public virtual Store, + public virtual LogStore { private: @@ -92,8 +95,7 @@ public: void queryPathInfoUncached(const StorePath & path, Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override; - std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override - { unsupported("queryPathFromHashPart"); } + std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override; void addToStore(const ValidPathInfo & info, Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs) override; diff --git a/src/libstore/build-result.hh b/src/libstore/build-result.hh new file mode 100644 index 000000000..24fb1f763 --- /dev/null +++ b/src/libstore/build-result.hh @@ -0,0 +1,92 @@ +#pragma once + +#include "realisation.hh" +#include "derived-path.hh" + +#include <string> +#include <chrono> + + +namespace nix { + +struct BuildResult +{ + /* Note: don't remove status codes, and only add new status codes + at the end of the list, to prevent client/server + incompatibilities in the nix-store --serve protocol. */ + enum Status { + Built = 0, + Substituted, + AlreadyValid, + PermanentFailure, + InputRejected, + OutputRejected, + TransientFailure, // possibly transient + CachedFailure, // no longer used + TimedOut, + MiscFailure, + DependencyFailed, + LogLimitExceeded, + NotDeterministic, + ResolvesToAlreadyValid, + NoSubstituters, + } status = MiscFailure; + + // FIXME: include entire ErrorInfo object. 
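    /* Illustrative use only (the 'store' and 'result' parameters are assumed,
       not taken from this header): callers typically branch on success() and
       then either inspect builtOutputs or surface errorMsg, e.g.

           void report(nix::Store & store, nix::BuildResult & result)
           {
               if (result.success()) {
                   for (auto & [id, realisation] : result.builtOutputs)
                       std::cout << store.printStorePath(realisation.outPath) << "\n";
               } else
                   throw nix::Error("build failed: %s", result.errorMsg);
           }
    */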
+ std::string errorMsg; + + std::string toString() const { + auto strStatus = [&]() { + switch (status) { + case Built: return "Built"; + case Substituted: return "Substituted"; + case AlreadyValid: return "AlreadyValid"; + case PermanentFailure: return "PermanentFailure"; + case InputRejected: return "InputRejected"; + case OutputRejected: return "OutputRejected"; + case TransientFailure: return "TransientFailure"; + case CachedFailure: return "CachedFailure"; + case TimedOut: return "TimedOut"; + case MiscFailure: return "MiscFailure"; + case DependencyFailed: return "DependencyFailed"; + case LogLimitExceeded: return "LogLimitExceeded"; + case NotDeterministic: return "NotDeterministic"; + case ResolvesToAlreadyValid: return "ResolvesToAlreadyValid"; + default: return "Unknown"; + }; + }(); + return strStatus + ((errorMsg == "") ? "" : " : " + errorMsg); + } + + /* How many times this build was performed. */ + unsigned int timesBuilt = 0; + + /* If timesBuilt > 1, whether some builds did not produce the same + result. (Note that 'isNonDeterministic = false' does not mean + the build is deterministic, just that we don't have evidence of + non-determinism.) */ + bool isNonDeterministic = false; + + /* The derivation we built or the store path we substituted. */ + DerivedPath path; + + /* For derivations, a mapping from the names of the wanted outputs + to actual paths. */ + DrvOutputs builtOutputs; + + /* The start/stop times of the build (or one of the rounds, if it + was repeated). */ + time_t startTime = 0, stopTime = 0; + + bool success() + { + return status == Built || status == Substituted || status == AlreadyValid || status == ResolvesToAlreadyValid; + } + + void rethrow() + { + throw Error("%s", errorMsg); + } +}; + +} diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 95da1841d..41d2e2a1c 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -66,7 +66,7 @@ namespace nix { DerivationGoal::DerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode) - : Goal(worker) + : Goal(worker, DerivedPath::Built { .drvPath = drvPath, .outputs = wantedOutputs }) , useDerivation(true) , drvPath(drvPath) , wantedOutputs(wantedOutputs) @@ -85,7 +85,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv, const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode) - : Goal(worker) + : Goal(worker, DerivedPath::Built { .drvPath = drvPath, .outputs = wantedOutputs }) , useDerivation(false) , drvPath(drvPath) , wantedOutputs(wantedOutputs) @@ -135,7 +135,7 @@ void DerivationGoal::killChild() void DerivationGoal::timedOut(Error && ex) { killChild(); - done(BuildResult::TimedOut, ex); + done(BuildResult::TimedOut, {}, ex); } @@ -182,7 +182,7 @@ void DerivationGoal::loadDerivation() trace("loading derivation"); if (nrFailed != 0) { - done(BuildResult::MiscFailure, Error("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath))); + done(BuildResult::MiscFailure, {}, Error("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath))); return; } @@ -204,10 +204,33 @@ void DerivationGoal::haveDerivation() { trace("have derivation"); - if (drv->type() == DerivationType::CAFloating) + parsedDrv = std::make_unique<ParsedDerivation>(drvPath, *drv); + + if (!drv->type().hasKnownOutputPaths()) 
settings.requireExperimentalFeature(Xp::CaDerivations); - retrySubstitution = false; + if (!drv->type().isPure()) { + settings.requireExperimentalFeature(Xp::ImpureDerivations); + + for (auto & [outputName, output] : drv->outputs) { + auto randomPath = StorePath::random(outputPathName(drv->name, outputName)); + assert(!worker.store.isValidPath(randomPath)); + initialOutputs.insert({ + outputName, + InitialOutput { + .wanted = true, + .outputHash = impureOutputHash, + .known = InitialOutputStatus { + .path = randomPath, + .status = PathStatus::Absent + } + } + }); + } + + gaveUpOnSubstitution(); + return; + } for (auto & i : drv->outputsAndOptPaths(worker.store)) if (i.second.second) @@ -215,34 +238,23 @@ void DerivationGoal::haveDerivation() auto outputHashes = staticOutputHashes(worker.evalStore, *drv); for (auto & [outputName, outputHash] : outputHashes) - initialOutputs.insert({ + initialOutputs.insert({ outputName, - InitialOutput{ + InitialOutput { .wanted = true, // Will be refined later .outputHash = outputHash } - }); + }); /* Check what outputs paths are not already valid. */ - checkPathValidity(); - bool allValid = true; - for (auto & [_, status] : initialOutputs) { - if (!status.wanted) continue; - if (!status.known || !status.known->isValid()) { - allValid = false; - break; - } - } + auto [allValid, validOutputs] = checkPathValidity(); /* If they are all valid, then we're done. */ if (allValid && buildMode == bmNormal) { - done(BuildResult::AlreadyValid); + done(BuildResult::AlreadyValid, std::move(validOutputs)); return; } - parsedDrv = std::make_unique<ParsedDerivation>(drvPath, *drv); - - /* We are first going to try to create the invalid output paths through substitutes. If that doesn't work, we'll build them. */ @@ -276,8 +288,10 @@ void DerivationGoal::outputsSubstitutionTried() { trace("all outputs substituted (maybe)"); + assert(drv->type().isPure()); + if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback) { - done(BuildResult::TransientFailure, + done(BuildResult::TransientFailure, {}, Error("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ", worker.store.printStorePath(drvPath))); return; @@ -301,23 +315,17 @@ void DerivationGoal::outputsSubstitutionTried() return; } - checkPathValidity(); - size_t nrInvalid = 0; - for (auto & [_, status] : initialOutputs) { - if (!status.wanted) continue; - if (!status.known || !status.known->isValid()) - nrInvalid++; - } + auto [allValid, validOutputs] = checkPathValidity(); - if (buildMode == bmNormal && nrInvalid == 0) { - done(BuildResult::Substituted); + if (buildMode == bmNormal && allValid) { + done(BuildResult::Substituted, std::move(validOutputs)); return; } - if (buildMode == bmRepair && nrInvalid == 0) { + if (buildMode == bmRepair && allValid) { repairClosure(); return; } - if (buildMode == bmCheck && nrInvalid > 0) + if (buildMode == bmCheck && !allValid) throw Error("some outputs of '%s' are not valid, so checking is not possible", worker.store.printStorePath(drvPath)); @@ -325,18 +333,27 @@ void DerivationGoal::outputsSubstitutionTried() gaveUpOnSubstitution(); } + /* At least one of the output paths could not be produced using a substitute. So we have to build instead. */ void DerivationGoal::gaveUpOnSubstitution() { - /* Make sure checkPathValidity() from now on checks all - outputs. */ - wantedOutputs.clear(); - /* The inputs must be built before we can build this goal. 
*/ + inputDrvOutputs.clear(); if (useDerivation) - for (auto & i : dynamic_cast<Derivation *>(drv.get())->inputDrvs) + for (auto & i : dynamic_cast<Derivation *>(drv.get())->inputDrvs) { + /* Ensure that pure, non-fixed-output derivations don't + depend on impure derivations. */ + if (settings.isExperimentalFeatureEnabled(Xp::ImpureDerivations) && drv->type().isPure() && !drv->type().isFixed()) { + auto inputDrv = worker.evalStore.readDerivation(i.first); + if (!inputDrv.type().isPure()) + throw Error("pure derivation '%s' depends on impure derivation '%s'", + worker.store.printStorePath(drvPath), + worker.store.printStorePath(i.first)); + } + addWaitee(worker.makeDerivationGoal(i.first, i.second, buildMode == bmRepair ? bmRepair : bmNormal)); + } /* Copy the input sources from the eval store to the build store. */ @@ -364,6 +381,8 @@ void DerivationGoal::gaveUpOnSubstitution() void DerivationGoal::repairClosure() { + assert(drv->type().isPure()); + /* If we're repairing, we now know that our own outputs are valid. Now check whether the other paths in the outputs closure are good. If not, then start derivation goals for the derivations @@ -409,7 +428,7 @@ void DerivationGoal::repairClosure() } if (waitees.empty()) { - done(BuildResult::AlreadyValid); + done(BuildResult::AlreadyValid, assertPathValidity()); return; } @@ -423,7 +442,7 @@ void DerivationGoal::closureRepaired() if (nrFailed > 0) throw Error("some paths in the output closure of derivation '%s' could not be repaired", worker.store.printStorePath(drvPath)); - done(BuildResult::AlreadyValid); + done(BuildResult::AlreadyValid, assertPathValidity()); } @@ -434,13 +453,14 @@ void DerivationGoal::inputsRealised() if (nrFailed != 0) { if (!useDerivation) throw Error("some dependencies of '%s' are missing", worker.store.printStorePath(drvPath)); - done(BuildResult::DependencyFailed, Error( + done(BuildResult::DependencyFailed, {}, Error( "%s dependencies of derivation '%s' failed to build", nrFailed, worker.store.printStorePath(drvPath))); return; } - if (retrySubstitution) { + if (retrySubstitution && !retriedSubstitution) { + retriedSubstitution = true; haveDerivation(); return; } @@ -454,19 +474,40 @@ void DerivationGoal::inputsRealised() if (useDerivation) { auto & fullDrv = *dynamic_cast<Derivation *>(drv.get()); - if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) && - ((!fullDrv.inputDrvs.empty() && derivationIsCA(fullDrv.type())) - || fullDrv.type() == DerivationType::DeferredInputAddressed)) { + auto drvType = fullDrv.type(); + bool resolveDrv = std::visit(overloaded { + [&](const DerivationType::InputAddressed & ia) { + /* must resolve if deferred. */ + return ia.deferred; + }, + [&](const DerivationType::ContentAddressed & ca) { + return !fullDrv.inputDrvs.empty() && ( + ca.fixed + /* Can optionally resolve if fixed, which is good + for avoiding unnecessary rebuilds. */ + ? settings.isExperimentalFeatureEnabled(Xp::CaDerivations) + /* Must resolve if floating and there are any inputs + drvs. */ + : true); + }, + [&](const DerivationType::Impure &) { + return true; + } + }, drvType.raw()); + + if (resolveDrv && !fullDrv.inputDrvs.empty()) { + settings.requireExperimentalFeature(Xp::CaDerivations); + /* We are be able to resolve this derivation based on the - now-known results of dependencies. If so, we become a stub goal - aliasing that resolved derivation goal */ - std::optional attempt = fullDrv.tryResolve(worker.store); + now-known results of dependencies. 
If so, we become a + stub goal aliasing that resolved derivation goal. */ + std::optional attempt = fullDrv.tryResolve(worker.store, inputDrvOutputs); assert(attempt); Derivation drvResolved { *std::move(attempt) }; auto pathResolved = writeDerivation(worker.store, drvResolved); - auto msg = fmt("Resolved derivation: '%s' -> '%s'", + auto msg = fmt("resolved derivation: '%s' -> '%s'", worker.store.printStorePath(drvPath), worker.store.printStorePath(pathResolved)); act = std::make_unique<Activity>(*logger, lvlInfo, actBuildWaiting, msg, @@ -487,21 +528,13 @@ void DerivationGoal::inputsRealised() /* Add the relevant output closures of the input derivation `i' as input paths. Only add the closures of output paths that are specified as inputs. */ - assert(worker.evalStore.isValidPath(drvPath)); - auto outputs = worker.evalStore.queryPartialDerivationOutputMap(depDrvPath); - for (auto & j : wantedDepOutputs) { - if (outputs.count(j) > 0) { - auto optRealizedInput = outputs.at(j); - if (!optRealizedInput) - throw Error( - "derivation '%s' requires output '%s' from input derivation '%s', which is supposedly realized already, yet we still don't know what path corresponds to that output", - worker.store.printStorePath(drvPath), j, worker.store.printStorePath(depDrvPath)); - worker.store.computeFSClosure(*optRealizedInput, inputPaths); - } else + for (auto & j : wantedDepOutputs) + if (auto outPath = get(inputDrvOutputs, { depDrvPath, j })) + worker.store.computeFSClosure(*outPath, inputPaths); + else throw Error( "derivation '%s' requires non-existent output '%s' from input derivation '%s'", worker.store.printStorePath(drvPath), j, worker.store.printStorePath(depDrvPath)); - } } } @@ -515,7 +548,7 @@ void DerivationGoal::inputsRealised() /* Don't repeat fixed-output derivations since they're already verified by their output hash.*/ - nrRounds = derivationIsFixed(derivationType) ? 1 : settings.buildRepeat + 1; + nrRounds = derivationType.isFixed() ? 1 : settings.buildRepeat + 1; /* Okay, try to build. Note that here we don't wait for a build slot to become available, since we don't need one if there is a @@ -523,10 +556,11 @@ void DerivationGoal::inputsRealised() state = &DerivationGoal::tryToBuild; worker.wakeUp(shared_from_this()); - result = BuildResult(); + buildResult = BuildResult { .path = buildResult.path }; } -void DerivationGoal::started() { +void DerivationGoal::started() +{ auto msg = fmt( buildMode == bmRepair ? "repairing outputs of '%s'" : buildMode == bmCheck ? "checking outputs of '%s'" : @@ -588,19 +622,12 @@ void DerivationGoal::tryToBuild() omitted, but that would be less efficient.) Note that since we now hold the locks on the output paths, no other process can build this derivation, so no further checks are necessary. */ - checkPathValidity(); - bool allValid = true; - for (auto & [_, status] : initialOutputs) { - if (!status.wanted) continue; - if (!status.known || !status.known->isValid()) { - allValid = false; - break; - } - } + auto [allValid, validOutputs] = checkPathValidity(); + if (buildMode != bmCheck && allValid) { debug("skipping build of derivation '%s', someone beat us to it", worker.store.printStorePath(drvPath)); outputLocks.setDeletion(true); - done(BuildResult::AlreadyValid); + done(BuildResult::AlreadyValid, std::move(validOutputs)); return; } @@ -626,7 +653,7 @@ void DerivationGoal::tryToBuild() /* Yes, it has started doing so. Wait until we get EOF from the hook. 
*/ actLock.reset(); - result.startTime = time(0); // inexact + buildResult.startTime = time(0); // inexact state = &DerivationGoal::buildDone; started(); return; @@ -678,8 +705,7 @@ static void movePath(const Path & src, const Path & dst) if (changePerm) chmod_(src, st.st_mode | S_IWUSR); - if (rename(src.c_str(), dst.c_str())) - throw SysError("renaming '%1%' to '%2%'", src, dst); + renameFile(src, dst); if (changePerm) chmod_(dst, st.st_mode); @@ -759,8 +785,7 @@ void runPostBuildHook( Store & store, Logger & logger, const StorePath & drvPath, - StorePathSet outputPaths -) + const StorePathSet & outputPaths) { auto hook = settings.postBuildHook; if (hook == "") @@ -830,8 +855,8 @@ void DerivationGoal::buildDone() debug("builder process for '%s' finished", worker.store.printStorePath(drvPath)); - result.timesBuilt++; - result.stopTime = time(0); + buildResult.timesBuilt++; + buildResult.stopTime = time(0); /* So the child is gone now. */ worker.childTerminated(this); @@ -876,11 +901,11 @@ void DerivationGoal::buildDone() /* Compute the FS closure of the outputs and register them as being valid. */ - registerOutputs(); + auto builtOutputs = registerOutputs(); StorePathSet outputPaths; - for (auto & [_, path] : finalOutputs) - outputPaths.insert(path); + for (auto & [_, output] : builtOutputs) + outputPaths.insert(output.outPath); runPostBuildHook( worker.store, *logger, @@ -888,12 +913,6 @@ void DerivationGoal::buildDone() outputPaths ); - if (buildMode == bmCheck) { - cleanupPostOutputsRegisteredModeCheck(); - done(BuildResult::Built); - return; - } - cleanupPostOutputsRegisteredModeNonCheck(); /* Repeat the build if necessary. */ @@ -911,6 +930,8 @@ void DerivationGoal::buildDone() outputLocks.setDeletion(true); outputLocks.unlock(); + done(BuildResult::Built, std::move(builtOutputs)); + } catch (BuildError & e) { outputLocks.unlock(); @@ -926,71 +947,73 @@ void DerivationGoal::buildDone() st = dynamic_cast<NotDeterministic*>(&e) ? BuildResult::NotDeterministic : statusOk(status) ? BuildResult::OutputRejected : - derivationIsImpure(derivationType) || diskFull ? BuildResult::TransientFailure : + !derivationType.isSandboxed() || diskFull ? 
BuildResult::TransientFailure : BuildResult::PermanentFailure; } - done(st, e); + done(st, {}, e); return; } - - done(BuildResult::Built); } -void DerivationGoal::resolvedFinished() { +void DerivationGoal::resolvedFinished() +{ + trace("resolved derivation finished"); + assert(resolvedDrvGoal); auto resolvedDrv = *resolvedDrvGoal->drv; + auto & resolvedResult = resolvedDrvGoal->buildResult; - auto resolvedHashes = staticOutputHashes(worker.store, resolvedDrv); + DrvOutputs builtOutputs; - StorePathSet outputPaths; + if (resolvedResult.success()) { + auto resolvedHashes = staticOutputHashes(worker.store, resolvedDrv); - // `wantedOutputs` might be empty, which means “all the outputs” - auto realWantedOutputs = wantedOutputs; - if (realWantedOutputs.empty()) - realWantedOutputs = resolvedDrv.outputNames(); + StorePathSet outputPaths; - for (auto & wantedOutput : realWantedOutputs) { - assert(initialOutputs.count(wantedOutput) != 0); - assert(resolvedHashes.count(wantedOutput) != 0); - auto realisation = worker.store.queryRealisation( - DrvOutput{resolvedHashes.at(wantedOutput), wantedOutput} - ); - // We've just built it, but maybe the build failed, in which case the - // realisation won't be there - if (realisation) { - auto newRealisation = *realisation; - newRealisation.id = DrvOutput{initialOutputs.at(wantedOutput).outputHash, wantedOutput}; - newRealisation.signatures.clear(); - newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation->outPath); - signRealisation(newRealisation); - worker.store.registerDrvOutput(newRealisation); + // `wantedOutputs` might be empty, which means “all the outputs” + auto realWantedOutputs = wantedOutputs; + if (realWantedOutputs.empty()) + realWantedOutputs = resolvedDrv.outputNames(); + + for (auto & wantedOutput : realWantedOutputs) { + auto initialOutput = get(initialOutputs, wantedOutput); + auto resolvedHash = get(resolvedHashes, wantedOutput); + if ((!initialOutput) || (!resolvedHash)) + throw Error( + "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,resolve)", + worker.store.printStorePath(drvPath), wantedOutput); + auto realisation = get(resolvedResult.builtOutputs, DrvOutput { *resolvedHash, wantedOutput }); + if (!realisation) + throw Error( + "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,realisation)", + worker.store.printStorePath(resolvedDrvGoal->drvPath), wantedOutput); + if (drv->type().isPure()) { + auto newRealisation = *realisation; + newRealisation.id = DrvOutput { initialOutput->outputHash, wantedOutput }; + newRealisation.signatures.clear(); + if (!drv->type().isFixed()) + newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation->outPath); + signRealisation(newRealisation); + worker.store.registerDrvOutput(newRealisation); + } outputPaths.insert(realisation->outPath); - } else { - // If we don't have a realisation, then it must mean that something - // failed when building the resolved drv - assert(!result.success()); + builtOutputs.emplace(realisation->id, *realisation); } + + runPostBuildHook( + worker.store, + *logger, + drvPath, + outputPaths + ); } - runPostBuildHook( - worker.store, - *logger, - drvPath, - outputPaths - ); - - auto status = [&]() { - auto resolvedResult = resolvedDrvGoal->getResult(); - switch (resolvedResult.status) { - case BuildResult::AlreadyValid: - return BuildResult::ResolvesToAlreadyValid; - default: - return resolvedResult.status; - } - }(); + auto status = 
resolvedResult.status; + if (status == BuildResult::AlreadyValid) + status = BuildResult::ResolvesToAlreadyValid; - done(status); + done(status, std::move(builtOutputs)); } HookReply DerivationGoal::tryBuildHook() @@ -1100,7 +1123,7 @@ HookReply DerivationGoal::tryBuildHook() } -void DerivationGoal::registerOutputs() +DrvOutputs DerivationGoal::registerOutputs() { /* When using a build hook, the build hook can register the output as valid (by doing `nix-store --import'). If so we don't have @@ -1109,21 +1132,7 @@ void DerivationGoal::registerOutputs() We can only early return when the outputs are known a priori. For floating content-addressed derivations this isn't the case. */ - for (auto & [outputName, optOutputPath] : worker.store.queryPartialDerivationOutputMap(drvPath)) { - if (!wantOutput(outputName, wantedOutputs)) - continue; - if (!optOutputPath) - throw BuildError( - "output '%s' from derivation '%s' does not have a known output path", - outputName, worker.store.printStorePath(drvPath)); - auto & outputPath = *optOutputPath; - if (!worker.store.isValidPath(outputPath)) - throw BuildError( - "output '%s' from derivation '%s' is supposed to be at '%s' but that path is not valid", - outputName, worker.store.printStorePath(drvPath), worker.store.printStorePath(outputPath)); - - finalOutputs.insert_or_assign(outputName, outputPath); - } + return assertPathValidity(); } Path DerivationGoal::openLogFile() @@ -1175,16 +1184,17 @@ bool DerivationGoal::isReadDesc(int fd) return fd == hook->builderOut.readSide.get(); } - void DerivationGoal::handleChildOutput(int fd, std::string_view data) { - if (isReadDesc(fd)) + // local & `ssh://`-builds are dealt with here. + auto isWrittenToLog = isReadDesc(fd); + if (isWrittenToLog) { logSize += data.size(); if (settings.maxLogSize && logSize > settings.maxLogSize) { killChild(); done( - BuildResult::LogLimitExceeded, + BuildResult::LogLimitExceeded, {}, Error("%s killed after writing more than %d bytes of log output", getName(), settings.maxLogSize)); return; @@ -1207,7 +1217,16 @@ void DerivationGoal::handleChildOutput(int fd, std::string_view data) if (hook && fd == hook->fromHook.readSide.get()) { for (auto c : data) if (c == '\n') { - handleJSONLogMessage(currentHookLine, worker.act, hook->activities, true); + auto json = parseJSONMessage(currentHookLine); + if (json) { + auto s = handleJSONLogMessage(*json, worker.act, hook->activities, true); + // ensure that logs from a builder using `ssh-ng://` as protocol + // are also available to `nix log`. + if (s && !isWrittenToLog && logSink && (*json)["type"] == resBuildLogLine) { + auto f = (*json)["fields"]; + (*logSink)((f.size() > 0 ? 
f.at(0).get<std::string>() : "") + "\n"); + } + } currentHookLine.clear(); } else currentHookLine += c; @@ -1241,7 +1260,8 @@ void DerivationGoal::flushLine() std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDerivationOutputMap() { - if (!useDerivation || drv->type() != DerivationType::CAFloating) { + assert(drv->type().isPure()); + if (!useDerivation || drv->type().hasKnownOutputPaths()) { std::map<std::string, std::optional<StorePath>> res; for (auto & [name, output] : drv->outputs) res.insert_or_assign(name, output.path(worker.store, drv->name, name)); @@ -1253,7 +1273,8 @@ std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDeri OutputPathMap DerivationGoal::queryDerivationOutputMap() { - if (!useDerivation || drv->type() != DerivationType::CAFloating) { + assert(drv->type().isPure()); + if (!useDerivation || drv->type().hasKnownOutputPaths()) { OutputPathMap res; for (auto & [name, output] : drv->outputsAndOptPaths(worker.store)) res.insert_or_assign(name, *output.second); @@ -1264,12 +1285,20 @@ OutputPathMap DerivationGoal::queryDerivationOutputMap() } -void DerivationGoal::checkPathValidity() +std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity() { + if (!drv->type().isPure()) return { false, {} }; + bool checkHash = buildMode == bmRepair; auto wantedOutputsLeft = wantedOutputs; + DrvOutputs validOutputs; + for (auto & i : queryPartialDerivationOutputMap()) { - InitialOutput & info = initialOutputs.at(i.first); + auto initialOutput = get(initialOutputs, i.first); + if (!initialOutput) + // this is an invalid output, gets catched with (!wantedOutputsLeft.empty()) + continue; + auto & info = *initialOutput; info.wanted = wantOutput(i.first, wantedOutputs); if (info.wanted) wantedOutputsLeft.erase(i.first); @@ -1284,27 +1313,30 @@ void DerivationGoal::checkPathValidity() : PathStatus::Corrupt, }; } + auto drvOutput = DrvOutput{info.outputHash, i.first}; if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) { - auto drvOutput = DrvOutput{initialOutputs.at(i.first).outputHash, i.first}; if (auto real = worker.store.queryRealisation(drvOutput)) { info.known = { .path = real->outPath, .status = PathStatus::Valid, }; - } else if (info.known && info.known->status == PathStatus::Valid) { - // We know the output because it' a static output of the + } else if (info.known && info.known->isValid()) { + // We know the output because it's a static output of the // derivation, and the output path is valid, but we don't have // its realisation stored (probably because it has been built - // without the `ca-derivations` experimental flag) + // without the `ca-derivations` experimental flag). worker.store.registerDrvOutput( - Realisation{ + Realisation { drvOutput, info.known->path, } ); } } + if (info.wanted && info.known && info.known->isValid()) + validOutputs.emplace(drvOutput, Realisation { drvOutput, info.known->path }); } + // If we requested all the outputs via the empty set, we are always fine. // If we requested specific elements, the loop above removes all the valid // ones, so any that are left must be invalid. 
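checkPathValidity() above leans on the convention that an empty wantedOutputs set means "all outputs". A minimal sketch of a helper with that behaviour (the real wantOutput lives elsewhere in the tree; this is only an illustration):

    // Returns true if 'output' is requested; an empty 'wanted' set selects everything.
    static bool wantOutput(const std::string & output, const std::set<std::string> & wanted)
    {
        return wanted.empty() || wanted.count(output);
    }
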
@@ -1312,24 +1344,48 @@ void DerivationGoal::checkPathValidity() throw Error("derivation '%s' does not have wanted outputs %s", worker.store.printStorePath(drvPath), concatStringsSep(", ", quoteStrings(wantedOutputsLeft))); + + bool allValid = true; + for (auto & [_, status] : initialOutputs) { + if (!status.wanted) continue; + if (!status.known || !status.known->isValid()) { + allValid = false; + break; + } + } + + return { allValid, validOutputs }; } -void DerivationGoal::done(BuildResult::Status status, std::optional<Error> ex) +DrvOutputs DerivationGoal::assertPathValidity() { - result.status = status; + auto [allValid, validOutputs] = checkPathValidity(); + if (!allValid) + throw Error("some outputs are unexpectedly invalid"); + return validOutputs; +} + + +void DerivationGoal::done( + BuildResult::Status status, + DrvOutputs builtOutputs, + std::optional<Error> ex) +{ + buildResult.status = status; if (ex) - result.errorMsg = ex->what(); - amDone(result.success() ? ecSuccess : ecFailed, ex); - if (result.status == BuildResult::TimedOut) + buildResult.errorMsg = fmt("%s", normaltxt(ex->info().msg)); + if (buildResult.status == BuildResult::TimedOut) worker.timedOut = true; - if (result.status == BuildResult::PermanentFailure) + if (buildResult.status == BuildResult::PermanentFailure) worker.permanentFailure = true; mcExpectedBuilds.reset(); mcRunningBuilds.reset(); - if (result.success()) { + if (buildResult.success()) { + assert(!builtOutputs.empty()); + buildResult.builtOutputs = std::move(builtOutputs); if (status == BuildResult::Built) worker.doneBuilds++; } else { @@ -1343,9 +1399,23 @@ void DerivationGoal::done(BuildResult::Status status, std::optional<Error> ex) if (traceBuiltOutputsFile != "") { std::fstream fs; fs.open(traceBuiltOutputsFile, std::fstream::out); - fs << worker.store.printStorePath(drvPath) << "\t" << result.toString() << std::endl; + fs << worker.store.printStorePath(drvPath) << "\t" << buildResult.toString() << std::endl; } + + amDone(buildResult.success() ? ecSuccess : ecFailed, ex); } +void DerivationGoal::waiteeDone(GoalPtr waitee, ExitCode result) +{ + Goal::waiteeDone(waitee, result); + + if (waitee->buildResult.success()) + if (auto bfd = std::get_if<DerivedPath::Built>(&waitee->buildResult.path)) + for (auto & [output, realisation] : waitee->buildResult.builtOutputs) + inputDrvOutputs.insert_or_assign( + { bfd->drvPath, output.outputName }, + realisation.outPath); +} + } diff --git a/src/libstore/build/derivation-goal.hh b/src/libstore/build/derivation-goal.hh index d947482ef..2d8bfd592 100644 --- a/src/libstore/build/derivation-goal.hh +++ b/src/libstore/build/derivation-goal.hh @@ -57,12 +57,21 @@ struct DerivationGoal : public Goal them. */ StringSet wantedOutputs; + /* Mapping from input derivations + output names to actual store + paths. This is filled in by waiteeDone() as each dependency + finishes, before inputsRealised() is reached, */ + std::map<std::pair<StorePath, std::string>, StorePath> inputDrvOutputs; + /* Whether additional wanted outputs have been added. */ bool needRestart = false; /* Whether to retry substituting the outputs after building the - inputs. */ - bool retrySubstitution; + inputs. This is done in case of an incomplete closure. */ + bool retrySubstitution = false; + + /* Whether we've retried substitution, in which case we won't try + again. */ + bool retriedSubstitution = false; /* The derivation stored at drvPath. 
*/ std::unique_ptr<Derivation> drv; @@ -104,20 +113,8 @@ struct DerivationGoal : public Goal typedef void (DerivationGoal::*GoalState)(); GoalState state; - /* The final output paths of the build. - - - For input-addressed derivations, always the precomputed paths - - - For content-addressed derivations, calcuated from whatever the hash - ends up being. (Note that fixed outputs derivations that produce the - "wrong" output still install that data under its true content-address.) - */ - OutputPathMap finalOutputs; - BuildMode buildMode; - BuildResult result; - /* The current round, if we're building multiple times. */ size_t curRound = 1; @@ -152,8 +149,6 @@ struct DerivationGoal : public Goal /* Add wanted outputs to an already existing derivation goal. */ void addWantedOutputs(const StringSet & outputs); - BuildResult getResult() { return result; } - /* The states. */ void getDerivation(); void loadDerivation(); @@ -175,7 +170,7 @@ struct DerivationGoal : public Goal /* Check that the derivation outputs all exist and register them as valid. */ - virtual void registerOutputs(); + virtual DrvOutputs registerOutputs(); /* Open a log file and a pipe to it. */ Path openLogFile(); @@ -210,8 +205,17 @@ struct DerivationGoal : public Goal std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap(); OutputPathMap queryDerivationOutputMap(); - /* Return the set of (in)valid paths. */ - void checkPathValidity(); + /* Update 'initialOutputs' to determine the current status of the + outputs of the derivation. Also returns a Boolean denoting + whether all outputs are valid and non-corrupt, and a + 'DrvOutputs' structure containing the valid and wanted + outputs. */ + std::pair<bool, DrvOutputs> checkPathValidity(); + + /* Aborts if any output is not valid or corrupt, and otherwise + returns a 'DrvOutputs' structure containing the wanted + outputs. */ + DrvOutputs assertPathValidity(); /* Forcibly kill the child process, if any. */ virtual void killChild(); @@ -222,8 +226,11 @@ struct DerivationGoal : public Goal void done( BuildResult::Status status, + DrvOutputs builtOutputs = {}, std::optional<Error> ex = {}); + void waiteeDone(GoalPtr waitee, ExitCode result) override; + StorePathSet exportReferences(const StorePathSet & storePaths); }; diff --git a/src/libstore/build/drv-output-substitution-goal.cc b/src/libstore/build/drv-output-substitution-goal.cc index e6e810cdd..b7f7b5ab1 100644 --- a/src/libstore/build/drv-output-substitution-goal.cc +++ b/src/libstore/build/drv-output-substitution-goal.cc @@ -6,8 +6,12 @@ namespace nix { -DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal(const DrvOutput& id, Worker & worker, RepairFlag repair, std::optional<ContentAddress> ca) - : Goal(worker) +DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal( + const DrvOutput & id, + Worker & worker, + RepairFlag repair, + std::optional<ContentAddress> ca) + : Goal(worker, DerivedPath::Opaque { StorePath::dummy }) , id(id) { state = &DrvOutputSubstitutionGoal::init; @@ -32,12 +36,12 @@ void DrvOutputSubstitutionGoal::init() void DrvOutputSubstitutionGoal::tryNext() { - trace("Trying next substituter"); + trace("trying next substituter"); if (subs.size() == 0) { /* None left. Terminate this goal and let someone else deal with it. 
*/ - debug("drv output '%s' is required, but there is no substituter that can provide it", id.to_string()); + debug("derivation output '%s' is required, but there is no substituter that can provide it", id.to_string()); /* Hack: don't indicate failure if there were no substituters. In that case the calling derivation should just do a @@ -119,7 +123,7 @@ void DrvOutputSubstitutionGoal::realisationFetched() void DrvOutputSubstitutionGoal::outPathValid() { assert(outputInfo); - trace("Output path substituted"); + trace("output path substituted"); if (nrFailed > 0) { debug("The output path of the derivation output '%s' could not be substituted", id.to_string()); diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc index 9b4cfd835..bea7363db 100644 --- a/src/libstore/build/entry-points.cc +++ b/src/libstore/build/entry-points.cc @@ -47,43 +47,51 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod } } +std::vector<BuildResult> Store::buildPathsWithResults( + const std::vector<DerivedPath> & reqs, + BuildMode buildMode, + std::shared_ptr<Store> evalStore) +{ + Worker worker(*this, evalStore ? *evalStore : *this); + + Goals goals; + for (const auto & br : reqs) { + std::visit(overloaded { + [&](const DerivedPath::Built & bfd) { + goals.insert(worker.makeDerivationGoal(bfd.drvPath, bfd.outputs, buildMode)); + }, + [&](const DerivedPath::Opaque & bo) { + goals.insert(worker.makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? Repair : NoRepair)); + }, + }, br.raw()); + } + + worker.run(goals); + + std::vector<BuildResult> results; + + for (auto & i : goals) + results.push_back(i->buildResult); + + return results; +} + BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode) { Worker worker(*this, *this); auto goal = worker.makeBasicDerivationGoal(drvPath, drv, {}, buildMode); - BuildResult result; - try { worker.run(Goals{goal}); - result = goal->getResult(); + return goal->buildResult; } catch (Error & e) { - result.status = BuildResult::MiscFailure; - result.errorMsg = e.msg(); - } - // XXX: Should use `goal->queryPartialDerivationOutputMap()` once it's - // extended to return the full realisation for each output - auto staticDrvOutputs = drv.outputsAndOptPaths(*this); - auto outputHashes = staticOutputHashes(*this, drv); - for (auto & [outputName, staticOutput] : staticDrvOutputs) { - auto outputId = DrvOutput{outputHashes.at(outputName), outputName}; - if (staticOutput.second) - result.builtOutputs.insert_or_assign( - outputId, - Realisation{ outputId, *staticOutput.second} - ); - if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) && !derivationHasKnownOutputPaths(drv.type())) { - auto realisation = this->queryRealisation(outputId); - if (realisation) - result.builtOutputs.insert_or_assign( - outputId, - *realisation - ); - } - } - - return result; + return BuildResult { + .status = BuildResult::MiscFailure, + .errorMsg = e.msg(), + .path = DerivedPath::Built { .drvPath = drvPath }, + }; + }; } diff --git a/src/libstore/build/goal.cc b/src/libstore/build/goal.cc index d2420b107..58e805f55 100644 --- a/src/libstore/build/goal.cc +++ b/src/libstore/build/goal.cc @@ -28,7 +28,7 @@ void Goal::addWaitee(GoalPtr waitee) void Goal::waiteeDone(GoalPtr waitee, ExitCode result) { - assert(waitees.find(waitee) != waitees.end()); + assert(waitees.count(waitee)); waitees.erase(waitee); trace(fmt("waitee '%s' done; %d left", waitee->name, waitees.size())); diff --git 
a/src/libstore/build/goal.hh b/src/libstore/build/goal.hh index 68e08bdf9..35121c5d9 100644 --- a/src/libstore/build/goal.hh +++ b/src/libstore/build/goal.hh @@ -2,6 +2,7 @@ #include "types.hh" #include "store-api.hh" +#include "build-result.hh" namespace nix { @@ -39,30 +40,32 @@ struct Goal : public std::enable_shared_from_this<Goal> WeakGoals waiters; /* Number of goals we are/were waiting for that have failed. */ - unsigned int nrFailed; + size_t nrFailed = 0; /* Number of substitution goals we are/were waiting for that failed because there are no substituters. */ - unsigned int nrNoSubstituters; + size_t nrNoSubstituters = 0; /* Number of substitution goals we are/were waiting for that failed because they had unsubstitutable references. */ - unsigned int nrIncompleteClosure; + size_t nrIncompleteClosure = 0; /* Name of this goal for debugging purposes. */ std::string name; /* Whether the goal is finished. */ - ExitCode exitCode; + ExitCode exitCode = ecBusy; + + /* Build result. */ + BuildResult buildResult; /* Exception containing an error message, if any. */ std::optional<Error> ex; - Goal(Worker & worker) : worker(worker) - { - nrFailed = nrNoSubstituters = nrIncompleteClosure = 0; - exitCode = ecBusy; - } + Goal(Worker & worker, DerivedPath path) + : worker(worker) + , buildResult { .path = std::move(path) } + { } virtual ~Goal() { diff --git a/src/libstore/build/hook-instance.cc b/src/libstore/build/hook-instance.cc index 0f6f580be..cb58a1f02 100644 --- a/src/libstore/build/hook-instance.cc +++ b/src/libstore/build/hook-instance.cc @@ -7,6 +7,22 @@ HookInstance::HookInstance() { debug("starting build hook '%s'", settings.buildHook); + auto buildHookArgs = tokenizeString<std::list<std::string>>(settings.buildHook.get()); + + if (buildHookArgs.empty()) + throw Error("'build-hook' setting is empty"); + + auto buildHook = buildHookArgs.front(); + buildHookArgs.pop_front(); + + Strings args; + args.push_back(std::string(baseNameOf(buildHook))); + + for (auto & arg : buildHookArgs) + args.push_back(arg); + + args.push_back(std::to_string(verbosity)); + /* Create a pipe to get the output of the child. 
*/ fromHook.create(); @@ -36,14 +52,9 @@ HookInstance::HookInstance() if (dup2(builderOut.readSide.get(), 5) == -1) throw SysError("dupping builder's stdout/stderr"); - Strings args = { - std::string(baseNameOf(settings.buildHook.get())), - std::to_string(verbosity), - }; - - execv(settings.buildHook.get().c_str(), stringsToCharPtrs(args).data()); + execv(buildHook.c_str(), stringsToCharPtrs(args).data()); - throw SysError("executing '%s'", settings.buildHook); + throw SysError("executing '%s'", buildHook); }); pid.setSeparatePG(true); diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 247213166..64540d262 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -1,4 +1,5 @@ #include "local-derivation-goal.hh" +#include "gc-store.hh" #include "hook-instance.hh" #include "worker.hh" #include "builtins.hh" @@ -13,6 +14,7 @@ #include "worker-protocol.hh" #include "topo-sort.hh" #include "callback.hh" +#include "json-utils.hh" #include <regex> #include <queue> @@ -55,8 +57,6 @@ #include <pwd.h> #include <grp.h> -#include <nlohmann/json.hpp> - namespace nix { void handleDiffHook( @@ -186,7 +186,7 @@ void LocalDerivationGoal::tryLocalBuild() { outputLocks.unlock(); buildUser.reset(); worker.permanentFailure = true; - done(BuildResult::InputRejected, e); + done(BuildResult::InputRejected, {}, e); return; } @@ -216,8 +216,7 @@ static void movePath(const Path & src, const Path & dst) if (changePerm) chmod_(src, st.st_mode | S_IWUSR); - if (rename(src.c_str(), dst.c_str())) - throw SysError("renaming '%1%' to '%2%'", src, dst); + renameFile(src, dst); if (changePerm) chmod_(dst, st.st_mode); @@ -304,7 +303,7 @@ bool LocalDerivationGoal::cleanupDecideWhetherDiskFull() if (buildMode != bmCheck && status.known->isValid()) continue; auto p = worker.store.printStorePath(status.known->path); if (pathExists(chrootRootDir + p)) - rename((chrootRootDir + p).c_str(), p.c_str()); + renameFile((chrootRootDir + p), p); } return diskFull; @@ -387,7 +386,7 @@ void LocalDerivationGoal::startBuilder() else if (settings.sandboxMode == smDisabled) useChroot = false; else if (settings.sandboxMode == smRelaxed) - useChroot = !(derivationIsImpure(derivationType)) && !noChroot; + useChroot = derivationType.isSandboxed() && !noChroot; } auto & localStore = getLocalStore(); @@ -474,7 +473,7 @@ void LocalDerivationGoal::startBuilder() temporary build directory. The text files have the format used by `nix-store --register-validity'. However, the deriver fields are left empty. */ - auto s = get(drv->env, "exportReferencesGraph").value_or(""); + auto s = getOr(drv->env, "exportReferencesGraph", ""); Strings ss = tokenizeString<Strings>(s); if (ss.size() % 2 != 0) throw BuildError("odd number of tokens in 'exportReferencesGraph': '%1%'", s); @@ -608,7 +607,7 @@ void LocalDerivationGoal::startBuilder() "nogroup:x:65534:\n", sandboxGid())); /* Create /etc/hosts with localhost entry. */ - if (!(derivationIsImpure(derivationType))) + if (derivationType.isSandboxed()) writeFile(chrootRootDir + "/etc/hosts", "127.0.0.1 localhost\n::1 localhost\n"); /* Make the closure of the inputs available in the chroot, @@ -724,6 +723,9 @@ void LocalDerivationGoal::startBuilder() /* Run the builder. 
*/ printMsg(lvlChatty, "executing builder '%1%'", drv->builder); + printMsg(lvlChatty, "using builder args '%1%'", concatStringsSep(" ", drv->args)); + for (auto & i : drv->env) + printMsg(lvlVomit, "setting builder env variable '%1%'='%2%'", i.first, i.second); /* Create the log file. */ Path logFile = openLogFile(); @@ -776,7 +778,7 @@ void LocalDerivationGoal::startBuilder() if (tcsetattr(builderOut.writeSide.get(), TCSANOW, &term)) throw SysError("putting pseudoterminal into raw mode"); - result.startTime = time(0); + buildResult.startTime = time(0); /* Fork a child to build the package. */ @@ -816,7 +818,7 @@ void LocalDerivationGoal::startBuilder() us. */ - if (!(derivationIsImpure(derivationType))) + if (derivationType.isSandboxed()) privateNetwork = true; userNamespaceSync.create(); @@ -863,18 +865,43 @@ void LocalDerivationGoal::startBuilder() /* Some distros patch Linux to not allow unprivileged * user namespaces. If we get EPERM or EINVAL, try * without CLONE_NEWUSER and see if that works. + * Details: https://salsa.debian.org/kernel-team/linux/-/commit/d98e00eda6bea437e39b9e80444eee84a32438a6 */ usingUserNamespace = false; flags &= ~CLONE_NEWUSER; child = clone(childEntry, stack + stackSize, flags, this); } - /* Otherwise exit with EPERM so we can handle this in the - parent. This is only done when sandbox-fallback is set - to true (the default). */ - if (child == -1 && (errno == EPERM || errno == EINVAL) && settings.sandboxFallback) - _exit(1); - if (child == -1) throw SysError("cloning builder process"); - + if (child == -1) { + switch(errno) { + case EPERM: + case EINVAL: { + int errno_ = errno; + if (!userNamespacesEnabled && errno==EPERM) + notice("user namespaces appear to be disabled; they are required for sandboxing; check /proc/sys/user/max_user_namespaces"); + if (userNamespacesEnabled) { + Path procSysKernelUnprivilegedUsernsClone = "/proc/sys/kernel/unprivileged_userns_clone"; + if (pathExists(procSysKernelUnprivilegedUsernsClone) + && trim(readFile(procSysKernelUnprivilegedUsernsClone)) == "0") { + notice("user namespaces appear to be disabled; they are required for sandboxing; check /proc/sys/kernel/unprivileged_userns_clone"); + } + } + Path procSelfNsUser = "/proc/self/ns/user"; + if (!pathExists(procSelfNsUser)) + notice("/proc/self/ns/user does not exist; your kernel was likely built without CONFIG_USER_NS=y, which is required for sandboxing"); + /* Otherwise exit with EPERM so we can handle this in the + parent. This is only done when sandbox-fallback is set + to true (the default). */ + if (settings.sandboxFallback) + _exit(1); + /* Mention sandbox-fallback in the error message so the user + knows that having it disabled contributed to the + unrecoverability of this failure */ + throw SysError(errno_, "creating sandboxed builder process using clone(), without sandbox-fallback"); + } + default: + throw SysError("creating sandboxed builder process using clone()"); + } + } writeFull(builderOut.writeSide.get(), fmt("%d %d\n", usingUserNamespace, child)); _exit(0); @@ -1014,7 +1041,7 @@ void LocalDerivationGoal::initTmpDir() { there is no size constraint). 
*/ if (!parsedDrv->getStructuredAttrs()) { - StringSet passAsFile = tokenizeString<StringSet>(get(drv->env, "passAsFile").value_or("")); + StringSet passAsFile = tokenizeString<StringSet>(getOr(drv->env, "passAsFile", "")); for (auto & i : drv->env) { if (passAsFile.find(i.first) == passAsFile.end()) { env[i.first] = i.second; @@ -1077,7 +1104,7 @@ void LocalDerivationGoal::initEnv() derivation, tell the builder, so that for instance `fetchurl' can skip checking the output. On older Nixes, this environment variable won't be set, so `fetchurl' will do the check. */ - if (derivationIsFixed(derivationType)) env["NIX_OUTPUT_CHECKED"] = "1"; + if (derivationType.isFixed()) env["NIX_OUTPUT_CHECKED"] = "1"; /* *Only* if this is a fixed-output derivation, propagate the values of the environment variables specified in the @@ -1088,7 +1115,7 @@ void LocalDerivationGoal::initEnv() to the builder is generally impure, but the output of fixed-output derivations is by definition pure (since we already know the cryptographic hash of the output). */ - if (derivationIsImpure(derivationType)) { + if (!derivationType.isSandboxed()) { for (auto & i : parsedDrv->getStringsAttr("impureEnvVars").value_or(Strings())) env[i] = getEnv(i).value_or(""); } @@ -1156,7 +1183,7 @@ struct RestrictedStoreConfig : virtual LocalFSStoreConfig /* A wrapper around LocalStore that only allows building/querying of paths that are in the input closures of the build or were added via recursive Nix calls. */ -struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual LocalFSStore +struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual LocalFSStore, public virtual GcStore { ref<LocalStore> next; @@ -1289,6 +1316,16 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override { + for (auto & result : buildPathsWithResults(paths, buildMode, evalStore)) + if (!result.success()) + result.rethrow(); + } + + std::vector<BuildResult> buildPathsWithResults( + const std::vector<DerivedPath> & paths, + BuildMode buildMode = bmNormal, + std::shared_ptr<Store> evalStore = nullptr) override + { assert(!evalStore); if (buildMode != bmNormal) throw Error("unsupported build mode"); @@ -1301,26 +1338,13 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo throw InvalidPath("cannot build '%s' in recursive Nix because path is unknown", req.to_string(*next)); } - next->buildPaths(paths, buildMode); - - for (auto & path : paths) { - auto p = std::get_if<DerivedPath::Built>(&path); - if (!p) continue; - auto & bfd = *p; - auto drv = readDerivation(bfd.drvPath); - auto drvHashes = staticOutputHashes(*this, drv); - auto outputs = next->queryDerivationOutputMap(bfd.drvPath); - for (auto & [outputName, outputPath] : outputs) - if (wantOutput(outputName, bfd.outputs)) { - newPaths.insert(outputPath); - if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) { - auto thisRealisation = next->queryRealisation( - DrvOutput{drvHashes.at(outputName), outputName} - ); - assert(thisRealisation); - newRealisations.insert(*thisRealisation); - } - } + auto results = next->buildPathsWithResults(paths, buildMode); + + for (auto & result : results) { + for (auto & [outputName, output] : result.builtOutputs) { + newPaths.insert(output.outPath); + newRealisations.insert(output); + } } StorePathSet closure; @@ -1329,6 +1353,8 @@ struct RestrictedStore : public 
virtual RestrictedStoreConfig, public virtual Lo goal.addDependency(path); for (auto & real : Realisation::closure(*next, newRealisations)) goal.addedDrvOutputs.insert(real.id); + + return results; } BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, @@ -1369,6 +1395,12 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo next->queryMissing(allowed, willBuild, willSubstitute, unknown, downloadSize, narSize); } + + virtual std::optional<std::string> getBuildLog(const StorePath & path) override + { return std::nullopt; } + + virtual void addBuildLog(const StorePath & path, std::string_view log) override + { unsupported("addBuildLog"); } }; @@ -1591,6 +1623,8 @@ void LocalDerivationGoal::runChild() /* Warning: in the child we should absolutely not make any SQLite calls! */ + bool sendException = true; + try { /* child */ commonChildInit(builderOut); @@ -1697,7 +1731,7 @@ void LocalDerivationGoal::runChild() /* Fixed-output derivations typically need to access the network, so give them access to /etc/resolv.conf and so on. */ - if (derivationIsImpure(derivationType)) { + if (!derivationType.isSandboxed()) { // Only use nss functions to resolve hosts and // services. Don’t use it for anything else that may // be configured for this system. This limits the @@ -1738,7 +1772,19 @@ void LocalDerivationGoal::runChild() for (auto & i : dirsInChroot) { if (i.second.source == "/proc") continue; // backwards compatibility - doBind(i.second.source, chrootRootDir + i.first, i.second.optional); + + #if HAVE_EMBEDDED_SANDBOX_SHELL + if (i.second.source == "__embedded_sandbox_shell__") { + static unsigned char sh[] = { + #include "embedded-sandbox-shell.gen.hh" + }; + auto dst = chrootRootDir + i.first; + createDirs(dirOf(dst)); + writeFile(dst, std::string_view((const char *) sh, sizeof(sh))); + chmod_(dst, 0555); + } else + #endif + doBind(i.second.source, chrootRootDir + i.first, i.second.optional); } /* Bind a new instance of procfs on /proc. */ @@ -1954,7 +2000,7 @@ void LocalDerivationGoal::runChild() sandboxProfile += "(import \"sandbox-defaults.sb\")\n"; - if (derivationIsImpure(derivationType)) + if (!derivationType.isSandboxed()) sandboxProfile += "(import \"sandbox-network.sb\")\n"; /* Add the output paths we'll use at build-time to the chroot */ @@ -2048,6 +2094,8 @@ void LocalDerivationGoal::runChild() /* Indicate that we managed to set up the build environment. */ writeFull(STDERR_FILENO, std::string("\2\n")); + sendException = false; + /* Execute the program. This should not return. */ if (drv->isBuiltin()) { try { @@ -2101,16 +2149,19 @@ void LocalDerivationGoal::runChild() throw SysError("executing '%1%'", drv->builder); } catch (Error & e) { - writeFull(STDERR_FILENO, "\1\n"); - FdSink sink(STDERR_FILENO); - sink << e; - sink.flush(); + if (sendException) { + writeFull(STDERR_FILENO, "\1\n"); + FdSink sink(STDERR_FILENO); + sink << e; + sink.flush(); + } else + std::cerr << e.msg(); _exit(1); } } -void LocalDerivationGoal::registerOutputs() +DrvOutputs LocalDerivationGoal::registerOutputs() { /* When using a build hook, the build hook can register the output as valid (by doing `nix-store --import'). If so we don't have @@ -2119,10 +2170,8 @@ void LocalDerivationGoal::registerOutputs() We can only early return when the outputs are known a priori. For floating content-addressed derivations this isn't the case. 
*/ - if (hook) { - DerivationGoal::registerOutputs(); - return; - } + if (hook) + return DerivationGoal::registerOutputs(); std::map<std::string, ValidPathInfo> infos; @@ -2163,12 +2212,22 @@ void LocalDerivationGoal::registerOutputs() std::map<std::string, std::variant<AlreadyRegistered, PerhapsNeedToRegister>> outputReferencesIfUnregistered; std::map<std::string, struct stat> outputStats; for (auto & [outputName, _] : drv->outputs) { - auto actualPath = toRealPathChroot(worker.store.printStorePath(scratchOutputs.at(outputName))); + auto scratchOutput = get(scratchOutputs, outputName); + if (!scratchOutput) + throw BuildError( + "builder for '%s' has no scratch output for '%s'", + worker.store.printStorePath(drvPath), outputName); + auto actualPath = toRealPathChroot(worker.store.printStorePath(*scratchOutput)); outputsToSort.insert(outputName); /* Updated wanted info to remove the outputs we definitely don't need to register */ - auto & initialInfo = initialOutputs.at(outputName); + auto initialOutput = get(initialOutputs, outputName); + if (!initialOutput) + throw BuildError( + "builder for '%s' has no initial output for '%s'", + worker.store.printStorePath(drvPath), outputName); + auto & initialInfo = *initialOutput; /* Don't register if already valid, and not checking */ initialInfo.wanted = buildMode == bmCheck @@ -2223,6 +2282,11 @@ void LocalDerivationGoal::registerOutputs() auto sortedOutputNames = topoSort(outputsToSort, {[&](const std::string & name) { + auto orifu = get(outputReferencesIfUnregistered, name); + if (!orifu) + throw BuildError( + "no output reference for '%s' in build of '%s'", + name, worker.store.printStorePath(drvPath)); return std::visit(overloaded { /* Since we'll use the already installed versions of these, we can treat them as leaves and ignore any references they @@ -2237,7 +2301,7 @@ void LocalDerivationGoal::registerOutputs() referencedOutputs.insert(o); return referencedOutputs; }, - }, outputReferencesIfUnregistered.at(name)); + }, *orifu); }}, {[&](const std::string & path, const std::string & parent) { // TODO with more -vvvv also show the temporary paths for manual inspection. @@ -2248,10 +2312,13 @@ void LocalDerivationGoal::registerOutputs() std::reverse(sortedOutputNames.begin(), sortedOutputNames.end()); + OutputPathMap finalOutputs; + for (auto & outputName : sortedOutputNames) { - auto output = drv->outputs.at(outputName); - auto & scratchPath = scratchOutputs.at(outputName); - auto actualPath = toRealPathChroot(worker.store.printStorePath(scratchPath)); + auto output = get(drv->outputs, outputName); + auto scratchPath = get(scratchOutputs, outputName); + assert(output && scratchPath); + auto actualPath = toRealPathChroot(worker.store.printStorePath(*scratchPath)); auto finish = [&](StorePath finalStorePath) { /* Store the final path */ @@ -2259,10 +2326,13 @@ void LocalDerivationGoal::registerOutputs() /* The rewrite rule will be used in downstream outputs that refer to use. This is why the topological sort is essential to do first before this for loop. 
*/ - if (scratchPath != finalStorePath) - outputRewrites[std::string { scratchPath.hashPart() }] = std::string { finalStorePath.hashPart() }; + if (*scratchPath != finalStorePath) + outputRewrites[std::string { scratchPath->hashPart() }] = std::string { finalStorePath.hashPart() }; }; + auto orifu = get(outputReferencesIfUnregistered, outputName); + assert(orifu); + std::optional<StorePathSet> referencesOpt = std::visit(overloaded { [&](const AlreadyRegistered & skippedFinalPath) -> std::optional<StorePathSet> { finish(skippedFinalPath.path); @@ -2271,7 +2341,7 @@ void LocalDerivationGoal::registerOutputs() [&](const PerhapsNeedToRegister & r) -> std::optional<StorePathSet> { return r.refs; }, - }, outputReferencesIfUnregistered.at(outputName)); + }, *orifu); if (!referencesOpt) continue; @@ -2308,25 +2378,29 @@ void LocalDerivationGoal::registerOutputs() for (auto & r : references) { auto name = r.name(); auto origHash = std::string { r.hashPart() }; - if (r == scratchPath) + if (r == *scratchPath) { res.first = true; - else if (outputRewrites.count(origHash) == 0) - res.second.insert(r); - else { - std::string newRef = outputRewrites.at(origHash); + } else if (auto outputRewrite = get(outputRewrites, origHash)) { + std::string newRef = *outputRewrite; newRef += '-'; newRef += name; res.second.insert(StorePath { newRef }); + } else { + res.second.insert(r); } } return res; }; - auto newInfoFromCA = [&](const DerivationOutputCAFloating outputHash) -> ValidPathInfo { - auto & st = outputStats.at(outputName); + auto newInfoFromCA = [&](const DerivationOutput::CAFloating outputHash) -> ValidPathInfo { + auto st = get(outputStats, outputName); + if (!st) + throw BuildError( + "output path %1% without valid stats info", + actualPath); if (outputHash.method == FileIngestionMethod::Flat) { /* The output path should be a regular file without execute permission. 
*/ - if (!S_ISREG(st.st_mode) || (st.st_mode & S_IXUSR) != 0) + if (!S_ISREG(st->st_mode) || (st->st_mode & S_IXUSR) != 0) throw BuildError( "output path '%1%' should be a non-executable regular file " "since recursive hashing is not enabled (outputHashMode=flat)", @@ -2334,7 +2408,7 @@ void LocalDerivationGoal::registerOutputs() } rewriteOutput(); /* FIXME optimize and deduplicate with addToStore */ - std::string oldHashPart { scratchPath.hashPart() }; + std::string oldHashPart { scratchPath->hashPart() }; HashModuloSink caSink { outputHash.hashType, oldHashPart }; switch (outputHash.method) { case FileIngestionMethod::Recursive: @@ -2353,13 +2427,11 @@ void LocalDerivationGoal::registerOutputs() outputPathName(drv->name, outputName), refs.second, refs.first); - if (scratchPath != finalPath) { + if (*scratchPath != finalPath) { // Also rewrite the output path auto source = sinkToSource([&](Sink & nextSink) { - StringSink sink; - dumpPath(actualPath, sink); RewritingSink rsink2(oldHashPart, std::string(finalPath.hashPart()), nextSink); - rsink2(sink.s); + dumpPath(actualPath, rsink2); rsink2.flush(); }); Path tmpPath = actualPath + ".tmp"; @@ -2388,14 +2460,15 @@ void LocalDerivationGoal::registerOutputs() }; ValidPathInfo newInfo = std::visit(overloaded { - [&](const DerivationOutputInputAddressed & output) { + + [&](const DerivationOutput::InputAddressed & output) { /* input-addressed case */ auto requiredFinalPath = output.path; /* Preemptively add rewrite rule for final hash, as that is what the NAR hash will use rather than normalized-self references */ - if (scratchPath != requiredFinalPath) + if (*scratchPath != requiredFinalPath) outputRewrites.insert_or_assign( - std::string { scratchPath.hashPart() }, + std::string { scratchPath->hashPart() }, std::string { requiredFinalPath.hashPart() }); rewriteOutput(); auto narHashAndSize = hashPath(htSHA256, actualPath); @@ -2407,8 +2480,9 @@ void LocalDerivationGoal::registerOutputs() newInfo0.references.insert(newInfo0.path); return newInfo0; }, - [&](const DerivationOutputCAFixed & dof) { - auto newInfo0 = newInfoFromCA(DerivationOutputCAFloating { + + [&](const DerivationOutput::CAFixed & dof) { + auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating { .method = dof.hash.method, .hashType = dof.hash.hash.type, }); @@ -2429,15 +2503,25 @@ void LocalDerivationGoal::registerOutputs() } return newInfo0; }, - [&](DerivationOutputCAFloating dof) { + + [&](const DerivationOutput::CAFloating & dof) { return newInfoFromCA(dof); }, - [&](DerivationOutputDeferred) -> ValidPathInfo { + + [&](const DerivationOutput::Deferred &) -> ValidPathInfo { // No derivation should reach that point without having been // rewritten first assert(false); }, - }, output.output); + + [&](const DerivationOutput::Impure & doi) { + return newInfoFromCA(DerivationOutput::CAFloating { + .method = doi.method, + .hashType = doi.hashType, + }); + }, + + }, output->raw()); /* FIXME: set proper permissions in restorePath() so we don't have to do another traversal. */ @@ -2453,7 +2537,7 @@ void LocalDerivationGoal::registerOutputs() derivations. */ PathLocks dynamicOutputLock; dynamicOutputLock.setDeletion(true); - auto optFixedPath = output.path(worker.store, drv->name, outputName); + auto optFixedPath = output->path(worker.store, drv->name, outputName); if (!optFixedPath || worker.store.printStorePath(*optFixedPath) != finalDestPath) { @@ -2519,11 +2603,10 @@ void LocalDerivationGoal::registerOutputs() /* For debugging, print out the referenced and unreferenced paths. 
*/ for (auto & i : inputPaths) { - auto j = references.find(i); - if (j == references.end()) - debug("unreferenced input: '%1%'", worker.store.printStorePath(i)); - else + if (references.count(i)) debug("referenced input: '%1%'", worker.store.printStorePath(i)); + else + debug("unreferenced input: '%1%'", worker.store.printStorePath(i)); } if (curRound == nrRounds) { @@ -2547,11 +2630,12 @@ void LocalDerivationGoal::registerOutputs() } if (buildMode == bmCheck) { - // In case of FOD mismatches on `--check` an error must be thrown as this is also - // a source for non-determinism. + /* In case of fixed-output derivations, if there are + mismatches on `--check` an error must be thrown as this is + also a source for non-determinism. */ if (delayedException) std::rethrow_exception(delayedException); - return; + return assertPathValidity(); } /* Apply output checks. */ @@ -2563,7 +2647,7 @@ void LocalDerivationGoal::registerOutputs() assert(prevInfos.size() == infos.size()); for (auto i = prevInfos.begin(), j = infos.begin(); i != prevInfos.end(); ++i, ++j) if (!(*i == *j)) { - result.isNonDeterministic = true; + buildResult.isNonDeterministic = true; Path prev = worker.store.printStorePath(i->second.path) + checkSuffix; bool prevExists = keepPreviousRound && pathExists(prev); hintformat hint = prevExists @@ -2594,14 +2678,13 @@ void LocalDerivationGoal::registerOutputs() Path prev = path + checkSuffix; deletePath(prev); Path dst = path + checkSuffix; - if (rename(path.c_str(), dst.c_str())) - throw SysError("renaming '%s' to '%s'", path, dst); + renameFile(path, dst); } } if (curRound < nrRounds) { prevInfos = std::move(infos); - return; + return {}; } /* Remove the .check directories if we're done. FIXME: keep them @@ -2636,17 +2719,29 @@ void LocalDerivationGoal::registerOutputs() means it's safe to link the derivation to the output hash. We must do that for floating CA derivations, which otherwise couldn't be cached, but it's fine to do in all cases. 
*/ - - if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) { - for (auto& [outputName, newInfo] : infos) { - auto thisRealisation = Realisation{ - .id = DrvOutput{initialOutputs.at(outputName).outputHash, - outputName}, - .outPath = newInfo.path}; + DrvOutputs builtOutputs; + + for (auto & [outputName, newInfo] : infos) { + auto oldinfo = get(initialOutputs, outputName); + assert(oldinfo); + auto thisRealisation = Realisation { + .id = DrvOutput { + oldinfo->outputHash, + outputName + }, + .outPath = newInfo.path + }; + if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) + && drv->type().isPure()) + { signRealisation(thisRealisation); worker.store.registerDrvOutput(thisRealisation); } + if (wantOutput(outputName, wantedOutputs)) + builtOutputs.emplace(thisRealisation.id, thisRealisation); } + + return builtOutputs; } void LocalDerivationGoal::signRealisation(Realisation & realisation) @@ -2655,7 +2750,7 @@ void LocalDerivationGoal::signRealisation(Realisation & realisation) } -void LocalDerivationGoal::checkOutputs(const std::map<Path, ValidPathInfo> & outputs) +void LocalDerivationGoal::checkOutputs(const std::map<std::string, ValidPathInfo> & outputs) { std::map<Path, const ValidPathInfo &> outputsByPath; for (auto & output : outputs) @@ -2727,9 +2822,10 @@ void LocalDerivationGoal::checkOutputs(const std::map<Path, ValidPathInfo> & out for (auto & i : *value) { if (worker.store.isStorePath(i)) spec.insert(worker.store.parseStorePath(i)); - else if (finalOutputs.count(i)) - spec.insert(finalOutputs.at(i)); - else throw BuildError("derivation contains an illegal reference specifier '%s'", i); + else if (auto output = get(outputs, i)) + spec.insert(output->path); + else + throw BuildError("derivation contains an illegal reference specifier '%s'", i); } auto used = recursive @@ -2768,24 +2864,18 @@ void LocalDerivationGoal::checkOutputs(const std::map<Path, ValidPathInfo> & out }; if (auto structuredAttrs = parsedDrv->getStructuredAttrs()) { - auto outputChecks = structuredAttrs->find("outputChecks"); - if (outputChecks != structuredAttrs->end()) { - auto output = outputChecks->find(outputName); - - if (output != outputChecks->end()) { + if (auto outputChecks = get(*structuredAttrs, "outputChecks")) { + if (auto output = get(*outputChecks, outputName)) { Checks checks; - auto maxSize = output->find("maxSize"); - if (maxSize != output->end()) + if (auto maxSize = get(*output, "maxSize")) checks.maxSize = maxSize->get<uint64_t>(); - auto maxClosureSize = output->find("maxClosureSize"); - if (maxClosureSize != output->end()) + if (auto maxClosureSize = get(*output, "maxClosureSize")) checks.maxClosureSize = maxClosureSize->get<uint64_t>(); - auto get = [&](const std::string & name) -> std::optional<Strings> { - auto i = output->find(name); - if (i != output->end()) { + auto get_ = [&](const std::string & name) -> std::optional<Strings> { + if (auto i = get(*output, name)) { Strings res; for (auto j = i->begin(); j != i->end(); ++j) { if (!j->is_string()) @@ -2798,10 +2888,10 @@ void LocalDerivationGoal::checkOutputs(const std::map<Path, ValidPathInfo> & out return {}; }; - checks.allowedReferences = get("allowedReferences"); - checks.allowedRequisites = get("allowedRequisites"); - checks.disallowedReferences = get("disallowedReferences"); - checks.disallowedRequisites = get("disallowedRequisites"); + checks.allowedReferences = get_("allowedReferences"); + checks.allowedRequisites = get_("allowedRequisites"); + checks.disallowedReferences = get_("disallowedReferences"); + 
checks.disallowedRequisites = get_("disallowedRequisites"); applyChecks(checks); } diff --git a/src/libstore/build/local-derivation-goal.hh b/src/libstore/build/local-derivation-goal.hh index 6f934ef78..e6700a383 100644 --- a/src/libstore/build/local-derivation-goal.hh +++ b/src/libstore/build/local-derivation-goal.hh @@ -176,7 +176,7 @@ struct LocalDerivationGoal : public DerivationGoal /* Check that the derivation outputs all exist and register them as valid. */ - void registerOutputs() override; + DrvOutputs registerOutputs() override; void signRealisation(Realisation &) override; diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index c1bb1941d..2af105b4d 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -6,7 +6,7 @@ namespace nix { PathSubstitutionGoal::PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair, std::optional<ContentAddress> ca) - : Goal(worker) + : Goal(worker, DerivedPath::Opaque { storePath }) , storePath(storePath) , repair(repair) , ca(ca) @@ -24,6 +24,20 @@ PathSubstitutionGoal::~PathSubstitutionGoal() } +void PathSubstitutionGoal::done( + ExitCode result, + BuildResult::Status status, + std::optional<std::string> errorMsg) +{ + buildResult.status = status; + if (errorMsg) { + debug(*errorMsg); + buildResult.errorMsg = *errorMsg; + } + amDone(result); +} + + void PathSubstitutionGoal::work() { (this->*state)(); @@ -38,7 +52,7 @@ void PathSubstitutionGoal::init() /* If the path already exists we're done. */ if (!repair && worker.store.isValidPath(storePath)) { - amDone(ecSuccess); + done(ecSuccess, BuildResult::AlreadyValid); return; } @@ -60,12 +74,14 @@ void PathSubstitutionGoal::tryNext() if (subs.size() == 0) { /* None left. Terminate this goal and let someone else deal with it. */ - debug("path '%s' is required, but there is no substituter that can build it", worker.store.printStorePath(storePath)); /* Hack: don't indicate failure if there were no substituters. In that case the calling derivation should just do a build. */ - amDone(substituterFailed ? ecFailed : ecNoSubstituters); + done( + substituterFailed ? ecFailed : ecNoSubstituters, + BuildResult::NoSubstituters, + fmt("path '%s' is required, but there is no substituter that can build it", worker.store.printStorePath(storePath))); if (substituterFailed) { worker.failedSubstitutions++; @@ -138,7 +154,7 @@ void PathSubstitutionGoal::tryNext() only after we've downloaded the path. */ if (!sub->isTrusted && worker.store.pathInfoIsUntrusted(*info)) { - warn("the substitute for '%s' from '%s' is not signed by any of the keys in 'trusted-public-keys'", + warn("ignoring substitute for '%s' from '%s', as it's not signed by any of the keys in 'trusted-public-keys'", worker.store.printStorePath(storePath), sub->getUri()); tryNext(); return; @@ -162,8 +178,10 @@ void PathSubstitutionGoal::referencesValid() trace("all references realised"); if (nrFailed > 0) { - debug("some references of path '%s' could not be realised", worker.store.printStorePath(storePath)); - amDone(nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed); + done( + nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? 
ecIncompleteClosure : ecFailed, + BuildResult::DependencyFailed, + fmt("some references of path '%s' could not be realised", worker.store.printStorePath(storePath))); return; } @@ -268,7 +286,7 @@ void PathSubstitutionGoal::finished() worker.updateProgress(); - amDone(ecSuccess); + done(ecSuccess, BuildResult::Substituted); } diff --git a/src/libstore/build/substitution-goal.hh b/src/libstore/build/substitution-goal.hh index d8399d2a8..a73f8e666 100644 --- a/src/libstore/build/substitution-goal.hh +++ b/src/libstore/build/substitution-goal.hh @@ -53,6 +53,11 @@ struct PathSubstitutionGoal : public Goal /* Content address for recomputing store path */ std::optional<ContentAddress> ca; + void done( + ExitCode result, + BuildResult::Status status, + std::optional<std::string> errorMsg = {}); + public: PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt); ~PathSubstitutionGoal(); diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index f72c1cc9c..b192fbc77 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -350,7 +350,7 @@ void Worker::waitForInput() become `available'. Note that `available' (i.e., non-blocking) includes EOF. */ std::vector<struct pollfd> pollStatus; - std::map <int, int> fdToPollStatus; + std::map<int, size_t> fdToPollStatus; for (auto & i : children) { for (auto & j : i.fds) { pollStatus.push_back((struct pollfd) { .fd = j, .events = POLLIN }); @@ -380,7 +380,10 @@ void Worker::waitForInput() std::set<int> fds2(j->fds); std::vector<unsigned char> buffer(4096); for (auto & k : fds2) { - if (pollStatus.at(fdToPollStatus.at(k)).revents) { + const auto fdPollStatusId = get(fdToPollStatus, k); + assert(fdPollStatusId); + assert(*fdPollStatusId < pollStatus.size()); + if (pollStatus.at(*fdPollStatusId).revents) { ssize_t rd = ::read(k, buffer.data(), buffer.size()); // FIXME: is there a cleaner way to handle pt close // than EIO? Is this even standard? diff --git a/src/libstore/builtins/buildenv.cc b/src/libstore/builtins/buildenv.cc index 25d015cb9..47458a388 100644 --- a/src/libstore/builtins/buildenv.cc +++ b/src/libstore/builtins/buildenv.cc @@ -47,9 +47,9 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir, throw; } - /* The files below are special-cased to that they don't show up - * in user profiles, either because they are useless, or - * because they would cauase pointless collisions (e.g., each + /* The files below are special-cased so that they don't show + * up in user profiles, either because they are useless, or + * because they would cause pointless collisions (e.g., each * Python package brings its own * `$out/lib/pythonX.Y/site-packages/easy-install.pth'.) 
*/ @@ -57,7 +57,9 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir, hasSuffix(srcFile, "/nix-support") || hasSuffix(srcFile, "/perllocal.pod") || hasSuffix(srcFile, "/info/dir") || - hasSuffix(srcFile, "/log")) + hasSuffix(srcFile, "/log") || + hasSuffix(srcFile, "/manifest.nix") || + hasSuffix(srcFile, "/manifest.json")) continue; else if (S_ISDIR(srcSt.st_mode)) { @@ -91,8 +93,9 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir, auto prevPriority = state.priorities[dstFile]; if (prevPriority == priority) throw Error( - "packages '%1%' and '%2%' have the same priority %3%; " + "files '%1%' and '%2%' have the same priority %3%; " "use 'nix-env --set-flag priority NUMBER INSTALLED_PKGNAME' " + "or type 'nix profile install --help' if using 'nix profile' to find out how " "to change the priority of one of the conflicting packages" " (0 being the highest priority)", srcFile, readLink(dstFile), priority); diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index af3dfc409..7d7924d77 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -24,7 +24,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) Path storePath = getAttr("out"); auto mainUrl = getAttr("url"); - bool unpack = get(drv.env, "unpack").value_or("") == "1"; + bool unpack = getOr(drv.env, "unpack", "") == "1"; /* Note: have to use a fresh fileTransfer here because we're in a forked process. */ diff --git a/src/libstore/builtins/unpack-channel.cc b/src/libstore/builtins/unpack-channel.cc index 426d58a53..ba04bb16c 100644 --- a/src/libstore/builtins/unpack-channel.cc +++ b/src/libstore/builtins/unpack-channel.cc @@ -22,8 +22,7 @@ void builtinUnpackChannel(const BasicDerivation & drv) auto entries = readDirectory(out); if (entries.size() != 1) throw Error("channel tarball '%s' contains more than one file", src); - if (rename((out + "/" + entries[0].name).c_str(), (out + "/" + channelName).c_str()) == -1) - throw SysError("renaming channel directory"); + renameFile((out + "/" + entries[0].name), (out + "/" + channelName)); } } diff --git a/src/libstore/ca-specific-schema.sql b/src/libstore/ca-specific-schema.sql index 64cc97fde..4ca91f585 100644 --- a/src/libstore/ca-specific-schema.sql +++ b/src/libstore/ca-specific-schema.sql @@ -13,12 +13,27 @@ create table if not exists Realisations ( create index if not exists IndexRealisations on Realisations(drvPath, outputName); +-- We can end up in a weird edge case where a path depends on itself because +-- it’s an output of a CA derivation, that happens to be the same as one of its +-- dependencies. 
+-- In that case we have a dependency loop (path -> realisation1 -> realisation2 +-- -> path) that we need to break by removing the dependencies between the +-- realisations +create trigger if not exists DeleteSelfRefsViaRealisations before delete on ValidPaths + begin + delete from RealisationsRefs where realisationReference in ( + select id from Realisations where outputPath = old.id + ); + end; + create table if not exists RealisationsRefs ( referrer integer not null, realisationReference integer, foreign key (referrer) references Realisations(id) on delete cascade, foreign key (realisationReference) references Realisations(id) on delete restrict ); +-- used by deletion trigger +create index if not exists IndexRealisationsRefsRealisationReference on RealisationsRefs(realisationReference); -- used by QueryRealisationReferences create index if not exists IndexRealisationsRefs on RealisationsRefs(referrer); diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 2ba03f0dd..48dd5c247 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -1,7 +1,11 @@ #include "daemon.hh" #include "monitor-fd.hh" #include "worker-protocol.hh" +#include "build-result.hh" #include "store-api.hh" +#include "store-cast.hh" +#include "gc-store.hh" +#include "log-store.hh" #include "path-with-outputs.hh" #include "finally.hh" #include "archive.hh" @@ -235,6 +239,8 @@ struct ClientSettings else if (trusted || name == settings.buildTimeout.name || name == settings.buildRepeat.name + || name == settings.maxSilentTime.name + || name == settings.pollInterval.name || name == "connect-timeout" || (name == "builders" && value == "")) settings.set(name, value); @@ -530,6 +536,25 @@ static void performOp(TunnelLogger * logger, ref<Store> store, break; } + case wopBuildPathsWithResults: { + auto drvs = readDerivedPaths(*store, clientVersion, from); + BuildMode mode = bmNormal; + mode = (BuildMode) readInt(from); + + /* Repairing is not atomic, so disallowed for "untrusted" + clients. */ + if (mode == bmRepair && !trusted) + throw Error("repairing is not allowed because you are not in 'trusted-users'"); + + logger->startWork(); + auto results = store->buildPathsWithResults(drvs, mode); + logger->stopWork(); + + worker_proto::write(*store, to, results); + + break; + } + case wopBuildDerivation: { auto drvPath = store->parseStorePath(readString(from)); BasicDerivation drv; @@ -537,6 +562,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store, BuildMode buildMode = (BuildMode) readInt(from); logger->startWork(); + auto drvType = drv.type(); + /* Content-addressed derivations are trustless because their output paths are verified by their content alone, so any derivation is free to try to produce such a path. @@ -569,12 +596,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store, derivations, we throw out the precomputed output paths and just store the hashes, so there aren't two competing sources of truth an attacker could exploit. */ - if (drv.type() == DerivationType::InputAddressed && !trusted) + if (!(drvType.isCA() || trusted)) throw Error("you are not privileged to build input-addressed derivations"); /* Make sure that the non-input-addressed derivations that got this far are in fact content-addressed if we don't trust them. */ - assert(derivationIsCA(drv.type()) || trusted); + assert(drvType.isCA() || trusted); /* Recompute the derivation path when we cannot trust the original. 
*/ if (!trusted) { @@ -583,7 +610,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store, original not-necessarily-resolved derivation to verify the drv derivation as adequate claim to the input-addressed output paths. */ - assert(derivationIsCA(drv.type())); + assert(drvType.isCA()); Derivation drv2; static_cast<BasicDerivation &>(drv2) = drv; @@ -622,9 +649,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store, case wopAddIndirectRoot: { Path path = absPath(readString(from)); + logger->startWork(); - store->addIndirectRoot(path); + auto & gcStore = require<GcStore>(*store); + gcStore.addIndirectRoot(path); logger->stopWork(); + to << 1; break; } @@ -639,7 +669,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store, case wopFindRoots: { logger->startWork(); - Roots roots = store->findRoots(!trusted); + auto & gcStore = require<GcStore>(*store); + Roots roots = gcStore.findRoots(!trusted); logger->stopWork(); size_t size = 0; @@ -670,7 +701,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store, logger->startWork(); if (options.ignoreLiveness) throw Error("you are not allowed to ignore liveness"); - store->collectGarbage(options, results); + auto & gcStore = require<GcStore>(*store); + gcStore.collectGarbage(options, results); logger->stopWork(); to << results.paths << results.bytesFreed << 0 /* obsolete */; @@ -927,11 +959,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store, logger->startWork(); if (!trusted) throw Error("you are not privileged to add logs"); + auto & logStore = require<LogStore>(*store); { FramedSource source(from); StringSink sink; source.drainInto(sink); - store->addBuildLog(path, sink.s); + logStore.addBuildLog(path, sink.s); } logger->stopWork(); to << 1; diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index a49be0057..fe99c3c5e 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -11,72 +11,114 @@ namespace nix { std::optional<StorePath> DerivationOutput::path(const Store & store, std::string_view drvName, std::string_view outputName) const { return std::visit(overloaded { - [](const DerivationOutputInputAddressed & doi) -> std::optional<StorePath> { + [](const DerivationOutput::InputAddressed & doi) -> std::optional<StorePath> { return { doi.path }; }, - [&](const DerivationOutputCAFixed & dof) -> std::optional<StorePath> { + [&](const DerivationOutput::CAFixed & dof) -> std::optional<StorePath> { return { dof.path(store, drvName, outputName) }; }, - [](const DerivationOutputCAFloating & dof) -> std::optional<StorePath> { + [](const DerivationOutput::CAFloating & dof) -> std::optional<StorePath> { return std::nullopt; }, - [](const DerivationOutputDeferred &) -> std::optional<StorePath> { + [](const DerivationOutput::Deferred &) -> std::optional<StorePath> { return std::nullopt; }, - }, output); + [](const DerivationOutput::Impure &) -> std::optional<StorePath> { + return std::nullopt; + }, + }, raw()); } -StorePath DerivationOutputCAFixed::path(const Store & store, std::string_view drvName, std::string_view outputName) const { +StorePath DerivationOutput::CAFixed::path(const Store & store, std::string_view drvName, std::string_view outputName) const +{ return store.makeFixedOutputPath( hash.method, hash.hash, outputPathName(drvName, outputName)); } -bool derivationIsCA(DerivationType dt) { - switch (dt) { - case DerivationType::InputAddressed: return false; - case DerivationType::CAFixed: return true; - case DerivationType::CAFloating: return true; - 
case DerivationType::DeferredInputAddressed: return false; - }; - // Since enums can have non-variant values, but making a `default:` would - // disable exhaustiveness warnings. - assert(false); +bool DerivationType::isCA() const +{ + /* Normally we do the full `std::visit` to make sure we have + exhaustively handled all variants, but so long as there is a + variant called `ContentAddressed`, it must be the only one for + which `isCA` is true for this to make sense!. */ + return std::visit(overloaded { + [](const InputAddressed & ia) { + return false; + }, + [](const ContentAddressed & ca) { + return true; + }, + [](const Impure &) { + return true; + }, + }, raw()); } -bool derivationIsFixed(DerivationType dt) { - switch (dt) { - case DerivationType::InputAddressed: return false; - case DerivationType::CAFixed: return true; - case DerivationType::CAFloating: return false; - case DerivationType::DeferredInputAddressed: return false; - }; - assert(false); +bool DerivationType::isFixed() const +{ + return std::visit(overloaded { + [](const InputAddressed & ia) { + return false; + }, + [](const ContentAddressed & ca) { + return ca.fixed; + }, + [](const Impure &) { + return false; + }, + }, raw()); } -bool derivationHasKnownOutputPaths(DerivationType dt) { - switch (dt) { - case DerivationType::InputAddressed: return true; - case DerivationType::CAFixed: return true; - case DerivationType::CAFloating: return false; - case DerivationType::DeferredInputAddressed: return false; - }; - assert(false); +bool DerivationType::hasKnownOutputPaths() const +{ + return std::visit(overloaded { + [](const InputAddressed & ia) { + return !ia.deferred; + }, + [](const ContentAddressed & ca) { + return ca.fixed; + }, + [](const Impure &) { + return false; + }, + }, raw()); } -bool derivationIsImpure(DerivationType dt) { - switch (dt) { - case DerivationType::InputAddressed: return false; - case DerivationType::CAFixed: return true; - case DerivationType::CAFloating: return false; - case DerivationType::DeferredInputAddressed: return false; - }; - assert(false); +bool DerivationType::isSandboxed() const +{ + return std::visit(overloaded { + [](const InputAddressed & ia) { + return true; + }, + [](const ContentAddressed & ca) { + return ca.sandboxed; + }, + [](const Impure &) { + return false; + }, + }, raw()); +} + + +bool DerivationType::isPure() const +{ + return std::visit(overloaded { + [](const InputAddressed & ia) { + return true; + }, + [](const ContentAddressed & ca) { + return true; + }, + [](const Impure &) { + return false; + }, + }, raw()); } @@ -177,37 +219,36 @@ static DerivationOutput parseDerivationOutput(const Store & store, hashAlgo = hashAlgo.substr(2); } const auto hashType = parseHashType(hashAlgo); - if (hash != "") { + if (hash == "impure") { + settings.requireExperimentalFeature(Xp::ImpureDerivations); + assert(pathS == ""); + return DerivationOutput::Impure { + .method = std::move(method), + .hashType = std::move(hashType), + }; + } else if (hash != "") { validatePath(pathS); - return DerivationOutput { - .output = DerivationOutputCAFixed { - .hash = FixedOutputHash { - .method = std::move(method), - .hash = Hash::parseNonSRIUnprefixed(hash, hashType), - }, + return DerivationOutput::CAFixed { + .hash = FixedOutputHash { + .method = std::move(method), + .hash = Hash::parseNonSRIUnprefixed(hash, hashType), }, }; } else { settings.requireExperimentalFeature(Xp::CaDerivations); assert(pathS == ""); - return DerivationOutput { - .output = DerivationOutputCAFloating { - .method = 
std::move(method), - .hashType = std::move(hashType), - }, + return DerivationOutput::CAFloating { + .method = std::move(method), + .hashType = std::move(hashType), }; } } else { if (pathS == "") { - return DerivationOutput { - .output = DerivationOutputDeferred { } - }; + return DerivationOutput::Deferred { }; } validatePath(pathS); - return DerivationOutput { - .output = DerivationOutputInputAddressed { - .path = store.parseStorePath(pathS), - } + return DerivationOutput::InputAddressed { + .path = store.parseStorePath(pathS), }; } } @@ -335,27 +376,33 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs, if (first) first = false; else s += ','; s += '('; printUnquotedString(s, i.first); std::visit(overloaded { - [&](const DerivationOutputInputAddressed & doi) { + [&](const DerivationOutput::InputAddressed & doi) { s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(doi.path)); s += ','; printUnquotedString(s, ""); s += ','; printUnquotedString(s, ""); }, - [&](const DerivationOutputCAFixed & dof) { + [&](const DerivationOutput::CAFixed & dof) { s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first))); s += ','; printUnquotedString(s, dof.hash.printMethodAlgo()); s += ','; printUnquotedString(s, dof.hash.hash.to_string(Base16, false)); }, - [&](const DerivationOutputCAFloating & dof) { + [&](const DerivationOutput::CAFloating & dof) { s += ','; printUnquotedString(s, ""); s += ','; printUnquotedString(s, makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType)); s += ','; printUnquotedString(s, ""); }, - [&](const DerivationOutputDeferred &) { + [&](const DerivationOutput::Deferred &) { s += ','; printUnquotedString(s, ""); s += ','; printUnquotedString(s, ""); s += ','; printUnquotedString(s, ""); + }, + [&](const DerivationOutputImpure & doi) { + // FIXME + s += ','; printUnquotedString(s, ""); + s += ','; printUnquotedString(s, makeFileIngestionPrefix(doi.method) + printHashType(doi.hashType)); + s += ','; printUnquotedString(s, "impure"); } - }, i.second.output); + }, i.second.raw()); s += ')'; } @@ -419,49 +466,100 @@ std::string outputPathName(std::string_view drvName, std::string_view outputName DerivationType BasicDerivation::type() const { - std::set<std::string_view> inputAddressedOutputs, fixedCAOutputs, floatingCAOutputs, deferredIAOutputs; + std::set<std::string_view> + inputAddressedOutputs, + fixedCAOutputs, + floatingCAOutputs, + deferredIAOutputs, + impureOutputs; std::optional<HashType> floatingHashType; + for (auto & i : outputs) { std::visit(overloaded { - [&](const DerivationOutputInputAddressed &) { + [&](const DerivationOutput::InputAddressed &) { inputAddressedOutputs.insert(i.first); }, - [&](const DerivationOutputCAFixed &) { + [&](const DerivationOutput::CAFixed &) { fixedCAOutputs.insert(i.first); }, - [&](const DerivationOutputCAFloating & dof) { + [&](const DerivationOutput::CAFloating & dof) { floatingCAOutputs.insert(i.first); if (!floatingHashType) { floatingHashType = dof.hashType; } else { if (*floatingHashType != dof.hashType) - throw Error("All floating outputs must use the same hash type"); + throw Error("all floating outputs must use the same hash type"); } }, - [&](const DerivationOutputDeferred &) { - deferredIAOutputs.insert(i.first); + [&](const DerivationOutput::Deferred &) { + deferredIAOutputs.insert(i.first); }, - }, i.second.output); + [&](const DerivationOutput::Impure &) { + impureOutputs.insert(i.first); + }, + }, i.second.raw()); } - if 
(inputAddressedOutputs.empty() && fixedCAOutputs.empty() && floatingCAOutputs.empty() && deferredIAOutputs.empty()) { - throw Error("Must have at least one output"); - } else if (! inputAddressedOutputs.empty() && fixedCAOutputs.empty() && floatingCAOutputs.empty() && deferredIAOutputs.empty()) { - return DerivationType::InputAddressed; - } else if (inputAddressedOutputs.empty() && ! fixedCAOutputs.empty() && floatingCAOutputs.empty() && deferredIAOutputs.empty()) { + if (inputAddressedOutputs.empty() + && fixedCAOutputs.empty() + && floatingCAOutputs.empty() + && deferredIAOutputs.empty() + && impureOutputs.empty()) + throw Error("must have at least one output"); + + if (!inputAddressedOutputs.empty() + && fixedCAOutputs.empty() + && floatingCAOutputs.empty() + && deferredIAOutputs.empty() + && impureOutputs.empty()) + return DerivationType::InputAddressed { + .deferred = false, + }; + + if (inputAddressedOutputs.empty() + && !fixedCAOutputs.empty() + && floatingCAOutputs.empty() + && deferredIAOutputs.empty() + && impureOutputs.empty()) + { if (fixedCAOutputs.size() > 1) // FIXME: Experimental feature? - throw Error("Only one fixed output is allowed for now"); + throw Error("only one fixed output is allowed for now"); if (*fixedCAOutputs.begin() != "out") - throw Error("Single fixed output must be named \"out\""); - return DerivationType::CAFixed; - } else if (inputAddressedOutputs.empty() && fixedCAOutputs.empty() && ! floatingCAOutputs.empty() && deferredIAOutputs.empty()) { - return DerivationType::CAFloating; - } else if (inputAddressedOutputs.empty() && fixedCAOutputs.empty() && floatingCAOutputs.empty() && !deferredIAOutputs.empty()) { - return DerivationType::DeferredInputAddressed; - } else { - throw Error("Can't mix derivation output types"); + throw Error("single fixed output must be named \"out\""); + return DerivationType::ContentAddressed { + .sandboxed = false, + .fixed = true, + }; } + + if (inputAddressedOutputs.empty() + && fixedCAOutputs.empty() + && !floatingCAOutputs.empty() + && deferredIAOutputs.empty() + && impureOutputs.empty()) + return DerivationType::ContentAddressed { + .sandboxed = true, + .fixed = false, + }; + + if (inputAddressedOutputs.empty() + && fixedCAOutputs.empty() + && floatingCAOutputs.empty() + && !deferredIAOutputs.empty() + && impureOutputs.empty()) + return DerivationType::InputAddressed { + .deferred = true, + }; + + if (inputAddressedOutputs.empty() + && fixedCAOutputs.empty() + && floatingCAOutputs.empty() + && deferredIAOutputs.empty() + && !impureOutputs.empty()) + return DerivationType::Impure { }; + + throw Error("can't mix derivation output types"); } @@ -473,7 +571,7 @@ Sync<DrvHashes> drvHashes; /* Look up the derivation by value and memoize the `hashDerivationModulo` call. */ -static const DrvHashModulo pathDerivationModulo(Store & store, const StorePath & drvPath) +static const DrvHash pathDerivationModulo(Store & store, const StorePath & drvPath) { { auto hashes = drvHashes.lock(); @@ -508,88 +606,85 @@ static const DrvHashModulo pathDerivationModulo(Store & store, const StorePath & don't leak the provenance of fixed outputs, reducing pointless cache misses as the build itself won't know this. */ -DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs) +DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs) { - bool isDeferred = false; + auto type = drv.type(); + /* Return a fixed hash for fixed-output derivations. 
*/ - switch (drv.type()) { - case DerivationType::CAFixed: { + if (type.isFixed()) { std::map<std::string, Hash> outputHashes; for (const auto & i : drv.outputs) { - auto & dof = std::get<DerivationOutputCAFixed>(i.second.output); + auto & dof = std::get<DerivationOutput::CAFixed>(i.second.raw()); auto hash = hashString(htSHA256, "fixed:out:" + dof.hash.printMethodAlgo() + ":" + dof.hash.hash.to_string(Base16, false) + ":" + store.printStorePath(dof.path(store, drv.name, i.first))); outputHashes.insert_or_assign(i.first, std::move(hash)); } - return outputHashes; + return DrvHash { + .hashes = outputHashes, + .kind = DrvHash::Kind::Regular, + }; } - case DerivationType::CAFloating: - isDeferred = true; - break; - case DerivationType::InputAddressed: - break; - case DerivationType::DeferredInputAddressed: - break; + + if (!type.isPure()) { + std::map<std::string, Hash> outputHashes; + for (const auto & [outputName, _] : drv.outputs) + outputHashes.insert_or_assign(outputName, impureOutputHash); + return DrvHash { + .hashes = outputHashes, + .kind = DrvHash::Kind::Deferred, + }; } - /* For other derivations, replace the inputs paths with recursive - calls to this function. */ + auto kind = std::visit(overloaded { + [](const DerivationType::InputAddressed & ia) { + /* This might be a "pessimistically" deferred output, so we don't + "taint" the kind yet. */ + return DrvHash::Kind::Regular; + }, + [](const DerivationType::ContentAddressed & ca) { + return ca.fixed + ? DrvHash::Kind::Regular + : DrvHash::Kind::Deferred; + }, + [](const DerivationType::Impure &) -> DrvHash::Kind { + assert(false); + } + }, drv.type().raw()); + std::map<std::string, StringSet> inputs2; - for (auto & i : drv.inputDrvs) { - const auto & res = pathDerivationModulo(store, i.first); - std::visit(overloaded { - // Regular non-CA derivation, replace derivation - [&](const Hash & drvHash) { - inputs2.insert_or_assign(drvHash.to_string(Base16, false), i.second); - }, - [&](const DeferredHash & deferredHash) { - isDeferred = true; - inputs2.insert_or_assign(deferredHash.hash.to_string(Base16, false), i.second); - }, - // CA derivation's output hashes - [&](const CaOutputHashes & outputHashes) { - std::set<std::string> justOut = { "out" }; - for (auto & output : i.second) { - /* Put each one in with a single "out" output.. 
*/ - const auto h = outputHashes.at(output); - inputs2.insert_or_assign( - h.to_string(Base16, false), - justOut); - } - }, - }, res); + for (auto & [drvPath, inputOutputs0] : drv.inputDrvs) { + // Avoid lambda capture restriction with standard / Clang + auto & inputOutputs = inputOutputs0; + const auto & res = pathDerivationModulo(store, drvPath); + if (res.kind == DrvHash::Kind::Deferred) + kind = DrvHash::Kind::Deferred; + for (auto & outputName : inputOutputs) { + const auto h = get(res.hashes, outputName); + if (!h) + throw Error("no hash for output '%s' of derivation '%s'", outputName, drv.name); + inputs2[h->to_string(Base16, false)].insert(outputName); + } } auto hash = hashString(htSHA256, drv.unparse(store, maskOutputs, &inputs2)); - if (isDeferred) - return DeferredHash { hash }; - else - return hash; + std::map<std::string, Hash> outputHashes; + for (const auto & [outputName, _] : drv.outputs) { + outputHashes.insert_or_assign(outputName, hash); + } + + return DrvHash { + .hashes = outputHashes, + .kind = kind, + }; } std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation & drv) { - std::map<std::string, Hash> res; - std::visit(overloaded { - [&](const Hash & drvHash) { - for (auto & outputName : drv.outputNames()) { - res.insert({outputName, drvHash}); - } - }, - [&](const DeferredHash & deferredHash) { - for (auto & outputName : drv.outputNames()) { - res.insert({outputName, deferredHash.hash}); - } - }, - [&](const CaOutputHashes & outputHashes) { - res = outputHashes; - }, - }, hashDerivationModulo(store, drv, true)); - return res; + return hashDerivationModulo(store, drv, true).hashes; } @@ -616,7 +711,8 @@ StringSet BasicDerivation::outputNames() const return names; } -DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const { +DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const +{ DerivationOutputsAndOptPaths outsAndOptPaths; for (auto output : outputs) outsAndOptPaths.insert(std::make_pair( @@ -627,7 +723,8 @@ DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & s return outsAndOptPaths; } -std::string_view BasicDerivation::nameFromPath(const StorePath & drvPath) { +std::string_view BasicDerivation::nameFromPath(const StorePath & drvPath) +{ auto nameWithSuffix = drvPath.name(); constexpr std::string_view extension = ".drv"; assert(hasSuffix(nameWithSuffix, extension)); @@ -669,27 +766,32 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr for (auto & i : drv.outputs) { out << i.first; std::visit(overloaded { - [&](const DerivationOutputInputAddressed & doi) { + [&](const DerivationOutput::InputAddressed & doi) { out << store.printStorePath(doi.path) << "" << ""; }, - [&](const DerivationOutputCAFixed & dof) { + [&](const DerivationOutput::CAFixed & dof) { out << store.printStorePath(dof.path(store, drv.name, i.first)) << dof.hash.printMethodAlgo() << dof.hash.hash.to_string(Base16, false); }, - [&](const DerivationOutputCAFloating & dof) { + [&](const DerivationOutput::CAFloating & dof) { out << "" << (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType)) << ""; }, - [&](const DerivationOutputDeferred &) { + [&](const DerivationOutput::Deferred &) { out << "" << "" << ""; }, - }, i.second.output); + [&](const DerivationOutput::Impure & doi) { + out << "" + << (makeFileIngestionPrefix(doi.method) + printHashType(doi.hashType)) + << "impure"; + }, + }, i.second.raw()); } worker_proto::write(store, out, 
drv.inputSrcs); out << drv.platform << drv.builder << drv.args; @@ -714,21 +816,19 @@ std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath } -static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites) { - - debug("Rewriting the derivation"); - - for (auto &rewrite: rewrites) { +static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites) +{ + for (auto & rewrite : rewrites) { debug("rewriting %s as %s", rewrite.first, rewrite.second); } drv.builder = rewriteStrings(drv.builder, rewrites); - for (auto & arg: drv.args) { + for (auto & arg : drv.args) { arg = rewriteStrings(arg, rewrites); } StringPairs newEnv; - for (auto & envVar: drv.env) { + for (auto & envVar : drv.env) { auto envName = rewriteStrings(envVar.first, rewrites); auto envValue = rewriteStrings(envVar.second, rewrites); newEnv.emplace(envName, envValue); @@ -737,43 +837,55 @@ static void rewriteDerivation(Store & store, BasicDerivation & drv, const String auto hashModulo = hashDerivationModulo(store, Derivation(drv), true); for (auto & [outputName, output] : drv.outputs) { - if (std::holds_alternative<DerivationOutputDeferred>(output.output)) { - Hash h = std::get<Hash>(hashModulo); - auto outPath = store.makeOutputPath(outputName, h, drv.name); + if (std::holds_alternative<DerivationOutput::Deferred>(output.raw())) { + auto h = get(hashModulo.hashes, outputName); + if (!h) + throw Error("derivation '%s' output '%s' has no hash (derivations.cc/rewriteDerivation)", + drv.name, outputName); + auto outPath = store.makeOutputPath(outputName, *h, drv.name); drv.env[outputName] = store.printStorePath(outPath); - output = DerivationOutput { - .output = DerivationOutputInputAddressed { - .path = std::move(outPath), - }, + output = DerivationOutput::InputAddressed { + .path = std::move(outPath), }; } } } -std::optional<BasicDerivation> Derivation::tryResolve(Store & store) { +std::optional<BasicDerivation> Derivation::tryResolve(Store & store) const +{ + std::map<std::pair<StorePath, std::string>, StorePath> inputDrvOutputs; + + for (auto & input : inputDrvs) + for (auto & [outputName, outputPath] : store.queryPartialDerivationOutputMap(input.first)) + if (outputPath) + inputDrvOutputs.insert_or_assign({input.first, outputName}, *outputPath); + + return tryResolve(store, inputDrvOutputs); +} + +std::optional<BasicDerivation> Derivation::tryResolve( + Store & store, + const std::map<std::pair<StorePath, std::string>, StorePath> & inputDrvOutputs) const +{ BasicDerivation resolved { *this }; // Input paths that we'll want to rewrite in the derivation StringMap inputRewrites; - for (auto & input : inputDrvs) { - auto inputDrvOutputs = store.queryPartialDerivationOutputMap(input.first); - StringSet newOutputNames; - for (auto & outputName : input.second) { - auto actualPathOpt = inputDrvOutputs.at(outputName); - if (!actualPathOpt) { - warn("output %s of input %s missing, aborting the resolving", + for (auto & [inputDrv, inputOutputs] : inputDrvs) { + for (auto & outputName : inputOutputs) { + if (auto actualPath = get(inputDrvOutputs, { inputDrv, outputName })) { + inputRewrites.emplace( + downstreamPlaceholder(store, inputDrv, outputName), + store.printStorePath(*actualPath)); + resolved.inputSrcs.insert(*actualPath); + } else { + warn("output '%s' of input '%s' missing, aborting the resolving", outputName, - store.printStorePath(input.first) - ); - return std::nullopt; + store.printStorePath(inputDrv)); + return {}; } - auto actualPath = 
*actualPathOpt; - inputRewrites.emplace( - downstreamPlaceholder(store, input.first, outputName), - store.printStorePath(actualPath)); - resolved.inputSrcs.insert(std::move(actualPath)); } } @@ -782,4 +894,6 @@ std::optional<BasicDerivation> Derivation::tryResolve(Store & store) { return resolved; } +const Hash impureOutputHash = hashString(htSHA256, "impure"); + } diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index 132de82b6..af198a767 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -4,6 +4,7 @@ #include "types.hh" #include "hash.hh" #include "content-address.hh" +#include "repair-flag.hh" #include "sync.hh" #include <map> @@ -40,23 +41,47 @@ struct DerivationOutputCAFloating }; /* Input-addressed output which depends on a (CA) derivation whose hash isn't - * known atm + * known yet. */ struct DerivationOutputDeferred {}; -struct DerivationOutput +/* Impure output which is moved to a content-addressed location (like + CAFloating) but isn't registered as a realization. + */ +struct DerivationOutputImpure +{ + /* information used for expected hash computation */ + FileIngestionMethod method; + HashType hashType; +}; + +typedef std::variant< + DerivationOutputInputAddressed, + DerivationOutputCAFixed, + DerivationOutputCAFloating, + DerivationOutputDeferred, + DerivationOutputImpure +> _DerivationOutputRaw; + +struct DerivationOutput : _DerivationOutputRaw { - std::variant< - DerivationOutputInputAddressed, - DerivationOutputCAFixed, - DerivationOutputCAFloating, - DerivationOutputDeferred - > output; + using Raw = _DerivationOutputRaw; + using Raw::Raw; + + using InputAddressed = DerivationOutputInputAddressed; + using CAFixed = DerivationOutputCAFixed; + using CAFloating = DerivationOutputCAFloating; + using Deferred = DerivationOutputDeferred; + using Impure = DerivationOutputImpure; /* Note, when you use this function you should make sure that you're passing the right derivation name. When in doubt, you should use the safer interface provided by BasicDerivation::outputsAndOptPaths */ std::optional<StorePath> path(const Store & store, std::string_view drvName, std::string_view outputName) const; + + inline const Raw & raw() const { + return static_cast<const Raw &>(*this); + } }; typedef std::map<std::string, DerivationOutput> DerivationOutputs; @@ -72,30 +97,62 @@ typedef std::map<std::string, std::pair<DerivationOutput, std::optional<StorePat output IDs we are interested in. */ typedef std::map<StorePath, StringSet> DerivationInputs; -enum struct DerivationType : uint8_t { - InputAddressed, - DeferredInputAddressed, - CAFixed, - CAFloating, +struct DerivationType_InputAddressed { + bool deferred; }; -/* Do the outputs of the derivation have paths calculated from their content, - or from the derivation itself? */ -bool derivationIsCA(DerivationType); - -/* Is the content of the outputs fixed a-priori via a hash? Never true for - non-CA derivations. */ -bool derivationIsFixed(DerivationType); +struct DerivationType_ContentAddressed { + bool sandboxed; + bool fixed; +}; -/* Is the derivation impure and needs to access non-deterministic resources, or - pure and can be sandboxed? Note that whether or not we actually sandbox the - derivation is controlled separately. Never true for non-CA derivations. */ -bool derivationIsImpure(DerivationType); +struct DerivationType_Impure { +}; -/* Does the derivation knows its own output paths? - * Only true when there's no floating-ca derivation involved in the closure. 
- */ -bool derivationHasKnownOutputPaths(DerivationType); +typedef std::variant< + DerivationType_InputAddressed, + DerivationType_ContentAddressed, + DerivationType_Impure +> _DerivationTypeRaw; + +struct DerivationType : _DerivationTypeRaw { + using Raw = _DerivationTypeRaw; + using Raw::Raw; + using InputAddressed = DerivationType_InputAddressed; + using ContentAddressed = DerivationType_ContentAddressed; + using Impure = DerivationType_Impure; + + /* Do the outputs of the derivation have paths calculated from their content, + or from the derivation itself? */ + bool isCA() const; + + /* Is the content of the outputs fixed a-priori via a hash? Never true for + non-CA derivations. */ + bool isFixed() const; + + /* Whether the derivation is fully sandboxed. If false, the + sandbox is opened up, e.g. the derivation has access to the + network. Note that whether or not we actually sandbox the + derivation is controlled separately. Always true for non-CA + derivations. */ + bool isSandboxed() const; + + /* Whether the derivation is expected to produce the same result + every time, and therefore it only needs to be built once. This + is only false for derivations that have the attribute '__impure + = true'. */ + bool isPure() const; + + /* Does the derivation know its own output paths? + Only true when there's no floating-ca derivation involved in the + closure, or if fixed output. + */ + bool hasKnownOutputPaths() const; + + inline const Raw & raw() const { + return static_cast<const Raw &>(*this); + } +}; struct BasicDerivation { @@ -140,7 +197,14 @@ struct Derivation : BasicDerivation added directly to input sources. 2. Input placeholders are replaced with realized input store paths. */ - std::optional<BasicDerivation> tryResolve(Store & store); + std::optional<BasicDerivation> tryResolve(Store & store) const; + + /* Like the above, but instead of querying the Nix database for + realisations, uses a given mapping from input derivation paths + + output names to actual output store paths. */ + std::optional<BasicDerivation> tryResolve( + Store & store, + const std::map<std::pair<StorePath, std::string>, StorePath> & inputDrvOutputs) const; Derivation() = default; Derivation(const BasicDerivation & bd) : BasicDerivation(bd) { } @@ -150,8 +214,6 @@ struct Derivation : BasicDerivation class Store; -enum RepairFlag : bool { NoRepair = false, Repair = true }; /* Write a derivation to the Nix store, and return its path. */ StorePath writeDerivation(Store & store, const Derivation & drv, @@ -171,17 +233,27 @@ bool isDerivation(const std::string & fileName); the output name is "out". */ std::string outputPathName(std::string_view drvName, std::string_view outputName); -// known CA drv's output hashes, current just for fixed-output derivations -// whose output hashes are always known since they are fixed up-front. -typedef std::map<std::string, Hash> CaOutputHashes; -struct DeferredHash { Hash hash; }; +// The hashes modulo of a derivation. +// +// Each output is given a hash, although in practice only the content-addressed +// derivations (fixed-output or not) will have a different hash for each +// output. +struct DrvHash { + std::map<std::string, Hash> hashes; -typedef std::variant< - Hash, // regular DRV normalized hash - CaOutputHashes, // Fixed-output derivation hashes - DeferredHash // Deferred hashes for floating outputs drvs and their dependencies -> DrvHashModulo; + enum struct Kind : bool { + // Statically determined derivations. 
+ // This hash will be directly used to compute the output paths + Regular, + // Floating-output derivations (and their reverse dependencies). + Deferred, + }; + + Kind kind; +}; + +void operator |= (DrvHash::Kind & self, const DrvHash::Kind & other) noexcept; /* Returns hashes with the details of fixed-output subderivations expunged. @@ -206,16 +278,18 @@ typedef std::variant< ATerm, after subderivations have been likewise expunged from that derivation. */ -DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs); +DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs); /* Return a map associating each output to a hash that uniquely identifies its derivation (modulo the self-references). + + FIXME: what is the Hash in this map? */ -std::map<std::string, Hash> staticOutputHashes(Store& store, const Derivation& drv); +std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation & drv); /* Memoisation of hashDerivationModulo(). */ -typedef std::map<StorePath, DrvHashModulo> DrvHashes; +typedef std::map<StorePath, DrvHash> DrvHashes; // FIXME: global, though at least thread-safe. extern Sync<DrvHashes> drvHashes; @@ -245,4 +319,6 @@ std::string hashPlaceholder(const std::string_view outputName); dependency which is a CA derivation. */ std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName); +extern const Hash impureOutputHash; + } diff --git a/src/libstore/derived-path.cc b/src/libstore/derived-path.cc index 194489580..44587ae78 100644 --- a/src/libstore/derived-path.cc +++ b/src/libstore/derived-path.cc @@ -1,8 +1,11 @@ #include "derived-path.hh" +#include "derivations.hh" #include "store-api.hh" #include <nlohmann/json.hpp> +#include <optional> + namespace nix { nlohmann::json DerivedPath::Opaque::toJSON(ref<Store> store) const { @@ -11,6 +14,21 @@ nlohmann::json DerivedPath::Opaque::toJSON(ref<Store> store) const { return res; } +nlohmann::json DerivedPath::Built::toJSON(ref<Store> store) const { + nlohmann::json res; + res["drvPath"] = store->printStorePath(drvPath); + // Fallback for the input-addressed derivation case: We expect to always be + // able to print the output paths, so let’s do it + const auto knownOutputs = store->queryPartialDerivationOutputMap(drvPath); + for (const auto& output : outputs) { + auto knownOutput = get(knownOutputs, output); + res["outputs"][output] = (knownOutput && *knownOutput) + ? 
store->printStorePath(**knownOutput) + : nullptr; + } + return res; +} + nlohmann::json BuiltPath::Built::toJSON(ref<Store> store) const { nlohmann::json res; res["drvPath"] = store->printStorePath(drvPath); @@ -35,16 +53,22 @@ StorePathSet BuiltPath::outPaths() const ); } -nlohmann::json derivedPathsWithHintsToJSON(const BuiltPaths & buildables, ref<Store> store) { +template<typename T> +nlohmann::json stuffToJSON(const std::vector<T> & ts, ref<Store> store) { auto res = nlohmann::json::array(); - for (const BuiltPath & buildable : buildables) { - std::visit([&res, store](const auto & buildable) { - res.push_back(buildable.toJSON(store)); - }, buildable.raw()); + for (const T & t : ts) { + std::visit([&res, store](const auto & t) { + res.push_back(t.toJSON(store)); + }, t.raw()); } return res; } +nlohmann::json derivedPathsWithHintsToJSON(const BuiltPaths & buildables, ref<Store> store) +{ return stuffToJSON<BuiltPath>(buildables, store); } +nlohmann::json derivedPathsToJSON(const DerivedPaths & paths, ref<Store> store) +{ return stuffToJSON<DerivedPath>(paths, store); } + std::string DerivedPath::Opaque::to_string(const Store & store) const { return store.printStorePath(path); @@ -101,10 +125,15 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const for (auto& [outputName, outputPath] : p.outputs) { if (settings.isExperimentalFeatureEnabled( Xp::CaDerivations)) { + auto drvOutput = get(drvHashes, outputName); + if (!drvOutput) + throw Error( + "the derivation '%s' has unrealised output '%s' (derived-path.cc/toRealisedPaths)", + store.printStorePath(p.drvPath), outputName); auto thisRealisation = store.queryRealisation( - DrvOutput{drvHashes.at(outputName), outputName}); - assert(thisRealisation); // We’ve built it, so we must h - // ve the realisation + DrvOutput{*drvOutput, outputName}); + assert(thisRealisation); // We’ve built it, so we must + // have the realisation res.insert(*thisRealisation); } else { res.insert(outputPath); diff --git a/src/libstore/derived-path.hh b/src/libstore/derived-path.hh index 9d6ace069..24a0ae773 100644 --- a/src/libstore/derived-path.hh +++ b/src/libstore/derived-path.hh @@ -25,6 +25,9 @@ struct DerivedPathOpaque { nlohmann::json toJSON(ref<Store> store) const; std::string to_string(const Store & store) const; static DerivedPathOpaque parse(const Store & store, std::string_view); + + bool operator < (const DerivedPathOpaque & b) const + { return path < b.path; } }; /** @@ -45,6 +48,10 @@ struct DerivedPathBuilt { std::string to_string(const Store & store) const; static DerivedPathBuilt parse(const Store & store, std::string_view); + nlohmann::json toJSON(ref<Store> store) const; + + bool operator < (const DerivedPathBuilt & b) const + { return std::make_pair(drvPath, outputs) < std::make_pair(b.drvPath, b.outputs); } }; using _DerivedPathRaw = std::variant< @@ -119,5 +126,6 @@ typedef std::vector<DerivedPath> DerivedPaths; typedef std::vector<BuiltPath> BuiltPaths; nlohmann::json derivedPathsWithHintsToJSON(const BuiltPaths & buildables, ref<Store> store); +nlohmann::json derivedPathsToJSON(const DerivedPaths & , ref<Store> store); } diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 255b42c49..5746c32a3 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -197,7 +197,7 @@ struct curlFileTransfer : public FileTransfer result.etag = ""; result.data.clear(); result.bodySize = 0; - statusMsg = trim((std::string &) match[1]); + statusMsg = trim(match.str(1)); acceptRanges = false; encoding 
= ""; } else { @@ -308,6 +308,9 @@ struct curlFileTransfer : public FileTransfer curl_easy_setopt(req, CURLOPT_HTTPHEADER, requestHeaders); + if (settings.downloadSpeed.get() > 0) + curl_easy_setopt(req, CURLOPT_MAX_RECV_SPEED_LARGE, (curl_off_t) (settings.downloadSpeed.get() * 1024)); + if (request.head) curl_easy_setopt(req, CURLOPT_NOBODY, 1); @@ -319,7 +322,6 @@ struct curlFileTransfer : public FileTransfer } if (request.verifyTLS) { - debug("verify TLS: Nix CA file = '%s'", settings.caFile); if (settings.caFile != "") curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.c_str()); } else { @@ -443,14 +445,13 @@ struct curlFileTransfer : public FileTransfer : httpStatus != 0 ? FileTransferError(err, std::move(response), - fmt("unable to %s '%s': HTTP error %d ('%s')", - request.verb(), request.uri, httpStatus, statusMsg) - + (code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code))) - ) + "unable to %s '%s': HTTP error %d%s", + request.verb(), request.uri, httpStatus, + code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code))) : FileTransferError(err, std::move(response), - fmt("unable to %s '%s': %s (%d)", - request.verb(), request.uri, curl_easy_strerror(code), code)); + "unable to %s '%s': %s (%d)", + request.verb(), request.uri, curl_easy_strerror(code), code); /* If this is a transient error, then maybe retry the download after a while. If we're writing to a @@ -693,10 +694,10 @@ struct curlFileTransfer : public FileTransfer #if ENABLE_S3 auto [bucketName, key, params] = parseS3Uri(request.uri); - std::string profile = get(params, "profile").value_or(""); - std::string region = get(params, "region").value_or(Aws::Region::US_EAST_1); - std::string scheme = get(params, "scheme").value_or(""); - std::string endpoint = get(params, "endpoint").value_or(""); + std::string profile = getOr(params, "profile", ""); + std::string region = getOr(params, "region", Aws::Region::US_EAST_1); + std::string scheme = getOr(params, "scheme", ""); + std::string endpoint = getOr(params, "endpoint", ""); S3Helper s3Helper(profile, region, scheme, endpoint); @@ -704,7 +705,7 @@ struct curlFileTransfer : public FileTransfer auto s3Res = s3Helper.getObject(bucketName, key); FileTransferResult res; if (!s3Res.data) - throw FileTransferError(NotFound, {}, "S3 object '%s' does not exist", request.uri); + throw FileTransferError(NotFound, "S3 object '%s' does not exist", request.uri); res.data = std::move(*s3Res.data); callback(std::move(res)); #else diff --git a/src/libstore/filetransfer.hh b/src/libstore/filetransfer.hh index ca61e3937..40e7cf52c 100644 --- a/src/libstore/filetransfer.hh +++ b/src/libstore/filetransfer.hh @@ -31,7 +31,7 @@ struct FileTransferSettings : Config R"( The timeout (in seconds) for establishing connections in the binary cache substituter. It corresponds to `curl`’s - `--connect-timeout` option. + `--connect-timeout` option. A value of 0 means no limit. )"}; Setting<unsigned long> stalledDownloadTimeout{ @@ -123,8 +123,6 @@ public: template<typename... Args> FileTransferError(FileTransfer::Error error, std::optional<std::string> response, const Args & ... 
args); - - virtual const char* sname() const override { return "FileTransferError"; } }; bool isUri(std::string_view s); diff --git a/src/libstore/gc-store.hh b/src/libstore/gc-store.hh new file mode 100644 index 000000000..b3cbbad74 --- /dev/null +++ b/src/libstore/gc-store.hh @@ -0,0 +1,84 @@ +#pragma once + +#include "store-api.hh" + + +namespace nix { + + +typedef std::unordered_map<StorePath, std::unordered_set<std::string>> Roots; + + +struct GCOptions +{ + /* Garbage collector operation: + + - `gcReturnLive': return the set of paths reachable from + (i.e. in the closure of) the roots. + + - `gcReturnDead': return the set of paths not reachable from + the roots. + + - `gcDeleteDead': actually delete the latter set. + + - `gcDeleteSpecific': delete the paths listed in + `pathsToDelete', insofar as they are not reachable. + */ + typedef enum { + gcReturnLive, + gcReturnDead, + gcDeleteDead, + gcDeleteSpecific, + } GCAction; + + GCAction action{gcDeleteDead}; + + /* If `ignoreLiveness' is set, then reachability from the roots is + ignored (dangerous!). However, the paths must still be + unreferenced *within* the store (i.e., there can be no other + store paths that depend on them). */ + bool ignoreLiveness{false}; + + /* For `gcDeleteSpecific', the paths to delete. */ + StorePathSet pathsToDelete; + + /* Stop after at least `maxFreed' bytes have been freed. */ + uint64_t maxFreed{std::numeric_limits<uint64_t>::max()}; +}; + + +struct GCResults +{ + /* Depending on the action, the GC roots, or the paths that would + be or have been deleted. */ + PathSet paths; + + /* For `gcReturnDead', `gcDeleteDead' and `gcDeleteSpecific', the + number of bytes that would be or was freed. */ + uint64_t bytesFreed = 0; +}; + + +struct GcStore : public virtual Store +{ + inline static std::string operationName = "Garbage collection"; + + /* Add an indirect root, which is merely a symlink to `path' from + /nix/var/nix/gcroots/auto/<hash of `path'>. `path' is supposed + to be a symlink to a store path. The garbage collector will + automatically remove the indirect root when it finds that + `path' has disappeared. */ + virtual void addIndirectRoot(const Path & path) = 0; + + /* Find the roots of the garbage collector. Each root is a pair + (link, storepath) where `link' is the path of the symlink + outside of the Nix store that point to `storePath'. If + 'censor' is true, privacy-sensitive information about roots + found in /proc is censored. */ + virtual Roots findRoots(bool censor) = 0; + + /* Perform a garbage collection. */ + virtual void collectGarbage(const GCOptions & options, GCResults & results) = 0; +}; + +} diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 024da66c1..9ef8972f3 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -39,9 +39,7 @@ static void makeSymlink(const Path & link, const Path & target) createSymlink(target, tempLink); /* Atomically replace the old one. 
*/ - if (rename(tempLink.c_str(), link.c_str()) == -1) - throw SysError("cannot rename '%1%' to '%2%'", - tempLink , link); + renameFile(tempLink, link); } @@ -135,6 +133,7 @@ void LocalStore::addTempRoot(const StorePath & path) state->fdRootsSocket.close(); goto restart; } + throw; } } @@ -153,6 +152,7 @@ void LocalStore::addTempRoot(const StorePath & path) state->fdRootsSocket.close(); goto restart; } + throw; } catch (EndOfFile & e) { debug("GC socket disconnected"); state->fdRootsSocket.close(); @@ -619,6 +619,17 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) Path path = storeDir + "/" + std::string(baseName); Path realPath = realStoreDir + "/" + std::string(baseName); + /* There may be temp directories in the store that are still in use + by another process. We need to be sure that we can acquire an + exclusive lock before deleting them. */ + if (baseName.find("tmp-", 0) == 0) { + AutoCloseFD tmpDirFd = open(realPath.c_str(), O_RDONLY | O_DIRECTORY); + if (tmpDirFd.get() == -1 || !lockFile(tmpDirFd.get(), ltWrite, false)) { + debug("skipping locked tempdir '%s'", realPath); + return; + } + } + printInfo("deleting '%1%'", path); results.paths.insert(path); @@ -678,7 +689,8 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) alive.insert(start); try { StorePathSet closure; - computeFSClosure(*path, closure); + computeFSClosure(*path, closure, + /* flipDirection */ false, gcKeepOutputs, gcKeepDerivations); for (auto & p : closure) alive.insert(p); } catch (InvalidPath &) { } @@ -841,7 +853,8 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) if (unlink(path.c_str()) == -1) throw SysError("deleting '%1%'", path); - results.bytesFreed += st.st_size; + /* Do not account for the deleted file here. Rely on deletePath() + accounting. 
*/ } struct stat st; diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index cc009a026..ff658c428 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -36,7 +36,6 @@ Settings::Settings() , nixStateDir(canonPath(getEnv("NIX_STATE_DIR").value_or(NIX_STATE_DIR))) , nixConfDir(canonPath(getEnv("NIX_CONF_DIR").value_or(NIX_CONF_DIR))) , nixUserConfFiles(getUserConfigFiles()) - , nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR").value_or(NIX_LIBEXEC_DIR))) , nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR))) , nixManDir(canonPath(NIX_MAN_DIR)) , nixDaemonSocketFile(canonPath(getEnv("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH))) @@ -67,12 +66,13 @@ Settings::Settings() sandboxPaths = tokenizeString<StringSet>("/bin/sh=" SANDBOX_SHELL); #endif - -/* chroot-like behavior from Apple's sandbox */ + /* chroot-like behavior from Apple's sandbox */ #if __APPLE__ sandboxPaths = tokenizeString<StringSet>("/System/Library/Frameworks /System/Library/PrivateFrameworks /bin/sh /bin/bash /private/tmp /private/var/tmp /usr/lib"); allowedImpureHostPrefixes = tokenizeString<StringSet>("/System/Library /usr/lib /dev /bin/sh"); #endif + + buildHook = getSelfExe().value_or("nix") + " __build-remote"; } void loadConfFile() @@ -114,7 +114,13 @@ std::vector<Path> getUserConfigFiles() unsigned int Settings::getDefaultCores() { - return std::max(1U, std::thread::hardware_concurrency()); + const unsigned int concurrency = std::max(1U, std::thread::hardware_concurrency()); + const unsigned int maxCPU = getMaxCPU(); + + if (maxCPU > 0) + return maxCPU; + else + return concurrency; } StringSet Settings::getDefaultSystemFeatures() @@ -148,13 +154,9 @@ StringSet Settings::getDefaultExtraPlatforms() // machines. Note that we can’t force processes from executing // x86_64 in aarch64 environments or vice versa since they can // always exec with their own binary preferences. - if (pathExists("/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist") || - pathExists("/System/Library/LaunchDaemons/com.apple.oahd.plist")) { - if (std::string{SYSTEM} == "x86_64-darwin") - extraPlatforms.insert("aarch64-darwin"); - else if (std::string{SYSTEM} == "aarch64-darwin") - extraPlatforms.insert("x86_64-darwin"); - } + if (std::string{SYSTEM} == "aarch64-darwin" && + runProgram(RunOptions {.program = "arch", .args = {"-arch", "x86_64", "/usr/bin/true"}, .mergeStderrToStdout = true}).first == 0) + extraPlatforms.insert("x86_64-darwin"); #endif return extraPlatforms; diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 3606f1db0..d3e86cc55 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -79,9 +79,6 @@ public: /* A list of user configuration files to load. */ std::vector<Path> nixUserConfFiles; - /* The directory where internal helper programs are stored. */ - Path nixLibexecDir; - /* The directory where the main programs are stored. */ Path nixBinDir; @@ -195,7 +192,7 @@ public: )", {"build-timeout"}}; - PathSetting buildHook{this, true, nixLibexecDir + "/nix/build-remote", "build-hook", + PathSetting buildHook{this, true, "", "build-hook", "The path of the helper program that executes builds to remote machines."}; Setting<std::string> builders{ @@ -576,9 +573,15 @@ public: R"( If set to `true` (the default), any non-content-addressed path added or copied to the Nix store (e.g. 
when substituting from a binary - cache) must have a valid signature, that is, be signed using one of - the keys listed in `trusted-public-keys` or `secret-key-files`. Set - to `false` to disable signature checking. + cache) must have a signature by a trusted key. A trusted key is one + listed in `trusted-public-keys`, or a public key counterpart to a + private key stored in a file listed in `secret-key-files`. + + Set to `false` to disable signature checking and trust all + non-content-addressed paths unconditionally. + + (Content-addressed paths are inherently trustworthy and thus + unaffected by this configuration option.) )"}; Setting<StringSet> extraPlatforms{ @@ -629,6 +632,14 @@ public: are tried based on their Priority value, which each substituter can set independently. Lower value means higher priority. The default is `https://cache.nixos.org`, with a Priority of 40. + + Nix will copy a store path from a remote store only if one + of the following is true: + + - the store object is signed by one of the [`trusted-public-keys`](#conf-trusted-public-keys) + - the substituter is in the [`trusted-substituters`](#conf-trusted-substituters) list + - the [`require-sigs`](#conf-require-sigs) option has been set to `false` + - the store object is [output-addressed](glossary.md#gloss-output-addressed-store-object) )", {"binary-caches"}}; @@ -762,6 +773,13 @@ public: /nix/store/xfghy8ixrhz3kyy6p724iv3cxji088dx-bash-4.4-p23`. )"}; + Setting<unsigned int> downloadSpeed { + this, 0, "download-speed", + R"( + Specify the maximum transfer rate in kilobytes per second you want + Nix to use for downloads. + )"}; + Setting<std::string> netrcFile{ this, fmt("%s/%s", nixConfDir, "netrc"), "netrc-file", R"( @@ -815,7 +833,7 @@ public: )"}; Setting<StringSet> ignoredAcls{ - this, {"security.selinux", "system.nfs4_acl"}, "ignored-acls", + this, {"security.selinux", "system.nfs4_acl", "security.csm"}, "ignored-acls", R"( A list of ACLs that should be ignored, normally Nix attempts to remove all ACLs from files and directories in the Nix store, but @@ -893,55 +911,6 @@ public: are loaded as plugins (non-recursively). )"}; - Setting<StringMap> accessTokens{this, {}, "access-tokens", - R"( - Access tokens used to access protected GitHub, GitLab, or - other locations requiring token-based authentication. - - Access tokens are specified as a string made up of - space-separated `host=token` values. The specific token - used is selected by matching the `host` portion against the - "host" specification of the input. The actual use of the - `token` value is determined by the type of resource being - accessed: - - * Github: the token value is the OAUTH-TOKEN string obtained - as the Personal Access Token from the Github server (see - https://docs.github.com/en/developers/apps/authorizing-oath-apps). - - * Gitlab: the token value is either the OAuth2 token or the - Personal Access Token (these are different types tokens - for gitlab, see - https://docs.gitlab.com/12.10/ee/api/README.html#authentication). - The `token` value should be `type:tokenstring` where - `type` is either `OAuth2` or `PAT` to indicate which type - of token is being specified. 
- - Example `~/.config/nix/nix.conf`: - - ``` - access-tokens = github.com=23ac...b289 gitlab.mycompany.com=PAT:A123Bp_Cd..EfG gitlab.com=OAuth2:1jklw3jk - ``` - - Example `~/code/flake.nix`: - - ```nix - input.foo = { - type = "gitlab"; - host = "gitlab.mycompany.com"; - owner = "mycompany"; - repo = "pro"; - }; - ``` - - This example specifies three tokens, one each for accessing - github.com, gitlab.mycompany.com, and sourceforge.net. - - The `input.foo` uses the "gitlab" fetcher, which might - requires specifying the token type along with the token - value. - )"}; - Setting<std::set<ExperimentalFeature>> experimentalFeatures{this, {}, "experimental-features", "Experimental Nix features to enable."}; @@ -949,18 +918,9 @@ public: void requireExperimentalFeature(const ExperimentalFeature &); - Setting<bool> allowDirty{this, true, "allow-dirty", - "Whether to allow dirty Git/Mercurial trees."}; - - Setting<bool> warnDirty{this, true, "warn-dirty", - "Whether to warn about dirty Git/Mercurial trees."}; - Setting<size_t> narBufferSize{this, 32 * 1024 * 1024, "nar-buffer-size", "Maximum size of NARs before spilling them to disk."}; - Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry", - "Path or URI of the global flake registry."}; - Setting<bool> allowSymlinkedStore{ this, false, "allow-symlinked-store", R"( @@ -973,19 +933,6 @@ public: resolves to a different location from that of the build machine. You can enable this setting if you are sure you're not going to do that. )"}; - - Setting<bool> useRegistries{this, true, "use-registries", - "Whether to use flake registries to resolve flake references."}; - - Setting<bool> acceptFlakeConfig{this, false, "accept-flake-config", - "Whether to accept nix configuration from a flake without prompting."}; - - Setting<std::string> commitLockFileSummary{ - this, "", "commit-lockfile-summary", - R"( - The commit summary to use when committing changed flake lock files. If - empty, the summary is generated based on the action performed. - )"}; }; diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 3cb5efdbf..73bcd6e81 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -161,7 +161,12 @@ protected: void getFile(const std::string & path, Callback<std::optional<std::string>> callback) noexcept override { - checkEnabled(); + try { + checkEnabled(); + } catch (...) 
{ + callback.rethrow(); + return; + } auto request(makeRequest(path)); diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 6b3daae7f..dd34b19c6 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -2,6 +2,7 @@ #include "pool.hh" #include "remote-store.hh" #include "serve-protocol.hh" +#include "build-result.hh" #include "store-api.hh" #include "path-with-outputs.hh" #include "worker-protocol.hh" @@ -278,7 +279,7 @@ public: conn->to.flush(); - BuildResult status; + BuildResult status { .path = DerivedPath::Built { .drvPath = drvPath } }; status.status = (BuildResult::Status) readInt(conn->from); conn->from >> status.errorMsg; @@ -316,7 +317,7 @@ public: conn->to.flush(); - BuildResult result; + BuildResult result { .path = DerivedPath::Opaque { StorePath::dummy } }; result.status = (BuildResult::Status) readInt(conn->from); if (!result.success()) { diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc index f754770f9..f20b1fa02 100644 --- a/src/libstore/local-binary-cache-store.cc +++ b/src/libstore/local-binary-cache-store.cc @@ -57,8 +57,7 @@ protected: AutoDelete del(tmp, false); StreamToSourceAdapter source(istream); writeFile(tmp, source); - if (rename(tmp.c_str(), path2.c_str())) - throw SysError("renaming '%1%' to '%2%'", tmp, path2); + renameFile(tmp, path2); del.cancel(); } @@ -69,6 +68,7 @@ protected: } catch (SysError & e) { if (e.errNo == ENOENT) throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache", path); + throw; } } @@ -107,7 +107,7 @@ bool LocalBinaryCacheStore::fileExists(const std::string & path) std::set<std::string> LocalBinaryCacheStore::uriSchemes() { - if (getEnv("_NIX_FORCE_HTTP_BINARY_CACHE_STORE") == "1") + if (getEnv("_NIX_FORCE_HTTP") == "1") return {}; else return {"file"}; diff --git a/src/libstore/local-fs-store.hh b/src/libstore/local-fs-store.hh index d34f0cb62..e6fb3201a 100644 --- a/src/libstore/local-fs-store.hh +++ b/src/libstore/local-fs-store.hh @@ -1,6 +1,8 @@ #pragma once #include "store-api.hh" +#include "gc-store.hh" +#include "log-store.hh" namespace nix { @@ -23,7 +25,10 @@ struct LocalFSStoreConfig : virtual StoreConfig "physical path to the Nix store"}; }; -class LocalFSStore : public virtual LocalFSStoreConfig, public virtual Store +class LocalFSStore : public virtual LocalFSStoreConfig, + public virtual Store, + public virtual GcStore, + public virtual LogStore { public: diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 9149724f2..04223d860 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -81,7 +81,7 @@ int getSchema(Path schemaPath) void migrateCASchema(SQLite& db, Path schemaPath, AutoCloseFD& lockFd) { - const int nixCASchemaVersion = 3; + const int nixCASchemaVersion = 4; int curCASchema = getSchema(schemaPath); if (curCASchema != nixCASchemaVersion) { if (curCASchema > nixCASchemaVersion) { @@ -143,7 +143,22 @@ void migrateCASchema(SQLite& db, Path schemaPath, AutoCloseFD& lockFd) )"); txn.commit(); } - writeFile(schemaPath, fmt("%d", nixCASchemaVersion)); + if (curCASchema < 4) { + SQLiteTxn txn(db); + db.exec(R"( + create trigger if not exists DeleteSelfRefsViaRealisations before delete on ValidPaths + begin + delete from RealisationsRefs where realisationReference in ( + select id from Realisations where outputPath = old.id + ); + end; + -- used by deletion trigger + create index if not exists IndexRealisationsRefsRealisationReference on 
RealisationsRefs(realisationReference); + )"); + txn.commit(); + } + + writeFile(schemaPath, fmt("%d", nixCASchemaVersion), 0666, true); lockFile(lockFd.get(), ltRead, true); } } @@ -266,7 +281,7 @@ LocalStore::LocalStore(const Params & params) else if (curSchema == 0) { /* new store */ curSchema = nixSchemaVersion; openDB(*state, true); - writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str()); + writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str(), 0666, true); } else if (curSchema < nixSchemaVersion) { @@ -314,7 +329,7 @@ LocalStore::LocalStore(const Params & params) txn.commit(); } - writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str()); + writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str(), 0666, true); lockFile(globalLock.get(), ltRead, true); } @@ -482,18 +497,18 @@ void LocalStore::openDB(State & state, bool create) SQLiteStmt stmt; stmt.create(db, "pragma main.journal_mode;"); if (sqlite3_step(stmt) != SQLITE_ROW) - throwSQLiteError(db, "querying journal mode"); + SQLiteError::throw_(db, "querying journal mode"); prevMode = std::string((const char *) sqlite3_column_text(stmt, 0)); } if (prevMode != mode && sqlite3_exec(db, ("pragma main.journal_mode = " + mode + ";").c_str(), 0, 0, 0) != SQLITE_OK) - throwSQLiteError(db, "setting journal mode"); + SQLiteError::throw_(db, "setting journal mode"); /* Increase the auto-checkpoint interval to 40000 pages. This seems enough to ensure that instantiating the NixOS system derivation is done in a single fsync(). */ if (mode == "wal" && sqlite3_exec(db, "pragma wal_autocheckpoint = 40000;", 0, 0, 0) != SQLITE_OK) - throwSQLiteError(db, "setting autocheckpoint interval"); + SQLiteError::throw_(db, "setting autocheckpoint interval"); /* Initialise the database schema, if necessary. */ if (create) { @@ -702,31 +717,38 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat // combinations that are currently prohibited. 
drv.type(); - std::optional<Hash> h; + std::optional<DrvHash> hashesModulo; for (auto & i : drv.outputs) { std::visit(overloaded { - [&](const DerivationOutputInputAddressed & doia) { - if (!h) { + [&](const DerivationOutput::InputAddressed & doia) { + if (!hashesModulo) { // somewhat expensive so we do lazily - auto temp = hashDerivationModulo(*this, drv, true); - h = std::get<Hash>(temp); + hashesModulo = hashDerivationModulo(*this, drv, true); } - StorePath recomputed = makeOutputPath(i.first, *h, drvName); + auto currentOutputHash = get(hashesModulo->hashes, i.first); + if (!currentOutputHash) + throw Error("derivation '%s' has unexpected output '%s' (local-store / hashesModulo) named '%s'", + printStorePath(drvPath), printStorePath(doia.path), i.first); + StorePath recomputed = makeOutputPath(i.first, *currentOutputHash, drvName); if (doia.path != recomputed) throw Error("derivation '%s' has incorrect output '%s', should be '%s'", printStorePath(drvPath), printStorePath(doia.path), printStorePath(recomputed)); envHasRightPath(doia.path, i.first); }, - [&](const DerivationOutputCAFixed & dof) { + [&](const DerivationOutput::CAFixed & dof) { StorePath path = makeFixedOutputPath(dof.hash.method, dof.hash.hash, drvName); envHasRightPath(path, i.first); }, - [&](const DerivationOutputCAFloating &) { + [&](const DerivationOutput::CAFloating &) { /* Nothing to check */ }, - [&](const DerivationOutputDeferred &) { + [&](const DerivationOutput::Deferred &) { + /* Nothing to check */ }, - }, i.second.output); + [&](const DerivationOutput::Impure &) { + /* Nothing to check */ + }, + }, i.second.raw()); } } @@ -736,7 +758,7 @@ void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag check if (checkSigs == NoCheckSigs || !realisationIsUntrusted(info)) registerDrvOutput(info); else - throw Error("cannot register realisation '%s' because it lacks a valid signature", info.outPath.to_string()); + throw Error("cannot register realisation '%s' because it lacks a signature by a trusted key", info.outPath.to_string()); } void LocalStore::registerDrvOutput(const Realisation & info) @@ -1251,7 +1273,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) { if (checkSigs && pathInfoIsUntrusted(info)) - throw Error("cannot add path '%s' because it lacks a valid signature", printStorePath(info.path)); + throw Error("cannot add path '%s' because it lacks a signature by a trusted key", printStorePath(info.path)); addTempRoot(info.path); @@ -1367,13 +1389,15 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name std::unique_ptr<AutoDelete> delTempDir; Path tempPath; + Path tempDir; + AutoCloseFD tempDirFd; if (!inMemory) { /* Drain what we pulled so far, and then keep on pulling */ StringSource dumpSource { dump }; ChainSource bothSource { dumpSource, source }; - auto tempDir = createTempDir(realStoreDir, "add"); + std::tie(tempDir, tempDirFd) = createTempDirInStore(); delTempDir = std::make_unique<AutoDelete>(tempDir); tempPath = tempDir + "/x"; @@ -1415,8 +1439,7 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name writeFile(realPath, dumpSource); } else { /* Move the temporary path we restored above. */ - if (rename(tempPath.c_str(), realPath.c_str())) - throw Error("renaming '%s' to '%s'", tempPath, realPath); + moveFile(tempPath, realPath); } /* For computing the nar hash. 
In recursive SHA-256 mode, this @@ -1493,18 +1516,24 @@ StorePath LocalStore::addTextToStore( /* Create a temporary directory in the store that won't be - garbage-collected. */ -Path LocalStore::createTempDirInStore() + garbage-collected until the returned FD is closed. */ +std::pair<Path, AutoCloseFD> LocalStore::createTempDirInStore() { - Path tmpDir; + Path tmpDirFn; + AutoCloseFD tmpDirFd; + bool lockedByUs = false; do { /* There is a slight possibility that `tmpDir' gets deleted by - the GC between createTempDir() and addTempRoot(), so repeat - until `tmpDir' exists. */ - tmpDir = createTempDir(realStoreDir); - addTempRoot(parseStorePath(tmpDir)); - } while (!pathExists(tmpDir)); - return tmpDir; + the GC between createTempDir() and when we acquire a lock on it. + We'll repeat until 'tmpDir' exists and we've locked it. */ + tmpDirFn = createTempDir(realStoreDir, "tmp"); + tmpDirFd = open(tmpDirFn.c_str(), O_RDONLY | O_DIRECTORY); + if (tmpDirFd.get() < 0) { + continue; + } + lockedByUs = lockFile(tmpDirFd.get(), ltWrite, true); + } while (!pathExists(tmpDirFn) || !lockedByUs); + return {tmpDirFn, std::move(tmpDirFd)}; } @@ -1927,8 +1956,7 @@ void LocalStore::addBuildLog(const StorePath & drvPath, std::string_view log) writeFile(tmpFile, compress("bzip2", log)); - if (rename(tmpFile.c_str(), logPath.c_str()) != 0) - throw SysError("renaming '%1%' to '%2%'", tmpFile, logPath); + renameFile(tmpFile, logPath); } std::optional<std::string> LocalStore::getVersion() diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index a1fe31dbb..4579c2f62 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -5,6 +5,7 @@ #include "pathlocks.hh" #include "store-api.hh" #include "local-fs-store.hh" +#include "gc-store.hh" #include "sync.hh" #include "util.hh" @@ -43,7 +44,7 @@ struct LocalStoreConfig : virtual LocalFSStoreConfig }; -class LocalStore : public virtual LocalStoreConfig, public virtual LocalFSStore +class LocalStore : public virtual LocalStoreConfig, public virtual LocalFSStore, public virtual GcStore { private: @@ -255,7 +256,7 @@ private: void findRuntimeRoots(Roots & roots, bool censor); - Path createTempDirInStore(); + std::pair<Path, AutoCloseFD> createTempDirInStore(); void checkDerivationOutputs(const StorePath & drvPath, const Derivation & drv); diff --git a/src/libstore/local.mk b/src/libstore/local.mk index b992bcbc0..1d26ac918 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -39,14 +39,23 @@ libstore_CXXFLAGS += \ -DNIX_STATE_DIR=\"$(localstatedir)/nix\" \ -DNIX_LOG_DIR=\"$(localstatedir)/log/nix\" \ -DNIX_CONF_DIR=\"$(sysconfdir)/nix\" \ - -DNIX_LIBEXEC_DIR=\"$(libexecdir)\" \ -DNIX_BIN_DIR=\"$(bindir)\" \ -DNIX_MAN_DIR=\"$(mandir)\" \ -DLSOF=\"$(lsof)\" +ifeq ($(embedded_sandbox_shell),yes) +libstore_CXXFLAGS += -DSANDBOX_SHELL=\"__embedded_sandbox_shell__\" + +$(d)/build/local-derivation-goal.cc: $(d)/embedded-sandbox-shell.gen.hh + +$(d)/embedded-sandbox-shell.gen.hh: $(sandbox_shell) + $(trace-gen) hexdump -v -e '1/1 "0x%x," "\n"' < $< > $@.tmp + @mv $@.tmp $@ +else ifneq ($(sandbox_shell),) libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\"" endif +endif $(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh diff --git a/src/libstore/lock.cc b/src/libstore/lock.cc index 02ccb1186..cc3977496 100644 --- a/src/libstore/lock.cc +++ b/src/libstore/lock.cc @@ -80,13 +80,30 @@ struct SimpleUserLock : UserLock /* Get the list of supplementary groups of this build user. 
This is usually either empty or contains a group such as "kvm". */ - lock->supplementaryGIDs.resize(10); - int ngroups = lock->supplementaryGIDs.size(); - int err = getgrouplist(pw->pw_name, pw->pw_gid, - lock->supplementaryGIDs.data(), &ngroups); + int ngroups = 32; // arbitrary initial guess + lock->supplementaryGIDs.resize(ngroups); + + int err = getgrouplist( + pw->pw_name, pw->pw_gid, + lock->supplementaryGIDs.data(), + &ngroups); + + /* Our initial size of 32 wasn't sufficient, the + correct size has been stored in ngroups, so we try + again. */ + if (err == -1) { + lock->supplementaryGIDs.resize(ngroups); + err = getgrouplist( + pw->pw_name, pw->pw_gid, + lock->supplementaryGIDs.data(), + &ngroups); + } + + // If it failed once more, then something must be broken. if (err == -1) throw Error("failed to get list of supplementary groups for '%s'", pw->pw_name); + // Finally, trim back the GID list to its real size. lock->supplementaryGIDs.resize(ngroups); #endif diff --git a/src/libstore/lock.hh b/src/libstore/lock.hh index bfb55b0d9..4b6d34069 100644 --- a/src/libstore/lock.hh +++ b/src/libstore/lock.hh @@ -2,6 +2,8 @@ #include "types.hh" +#include <optional> + namespace nix { struct UserLock diff --git a/src/libstore/log-store.hh b/src/libstore/log-store.hh new file mode 100644 index 000000000..ff1b92e17 --- /dev/null +++ b/src/libstore/log-store.hh @@ -0,0 +1,21 @@ +#pragma once + +#include "store-api.hh" + + +namespace nix { + +struct LogStore : public virtual Store +{ + inline static std::string operationName = "Build log storage and retrieval"; + + /* Return the build log of the specified store path, if available, + or null otherwise. */ + virtual std::optional<std::string> getBuildLog(const StorePath & path) = 0; + + virtual void addBuildLog(const StorePath & path, std::string_view log) = 0; + + static LogStore & require(Store & store); +}; + +} diff --git a/src/libstore/make-content-addressed.cc b/src/libstore/make-content-addressed.cc new file mode 100644 index 000000000..64d172918 --- /dev/null +++ b/src/libstore/make-content-addressed.cc @@ -0,0 +1,80 @@ +#include "make-content-addressed.hh" +#include "references.hh" + +namespace nix { + +std::map<StorePath, StorePath> makeContentAddressed( + Store & srcStore, + Store & dstStore, + const StorePathSet & storePaths) +{ + StorePathSet closure; + srcStore.computeFSClosure(storePaths, closure); + + auto paths = srcStore.topoSortPaths(closure); + + std::reverse(paths.begin(), paths.end()); + + std::map<StorePath, StorePath> remappings; + + for (auto & path : paths) { + auto pathS = srcStore.printStorePath(path); + auto oldInfo = srcStore.queryPathInfo(path); + std::string oldHashPart(path.hashPart()); + + StringSink sink; + srcStore.narFromPath(path, sink); + + StringMap rewrites; + + StorePathSet references; + bool hasSelfReference = false; + for (auto & ref : oldInfo->references) { + if (ref == path) + hasSelfReference = true; + else { + auto i = remappings.find(ref); + auto replacement = i != remappings.end() ? i->second : ref; + // FIXME: warn about unremapped paths? 
+ if (replacement != ref) + rewrites.insert_or_assign(srcStore.printStorePath(ref), srcStore.printStorePath(replacement)); + references.insert(std::move(replacement)); + } + } + + sink.s = rewriteStrings(sink.s, rewrites); + + HashModuloSink hashModuloSink(htSHA256, oldHashPart); + hashModuloSink(sink.s); + + auto narModuloHash = hashModuloSink.finish().first; + + auto dstPath = dstStore.makeFixedOutputPath( + FileIngestionMethod::Recursive, narModuloHash, path.name(), references, hasSelfReference); + + printInfo("rewriting '%s' to '%s'", pathS, srcStore.printStorePath(dstPath)); + + StringSink sink2; + RewritingSink rsink2(oldHashPart, std::string(dstPath.hashPart()), sink2); + rsink2(sink.s); + rsink2.flush(); + + ValidPathInfo info { dstPath, hashString(htSHA256, sink2.s) }; + info.references = std::move(references); + if (hasSelfReference) info.references.insert(info.path); + info.narSize = sink.s.size(); + info.ca = FixedOutputHash { + .method = FileIngestionMethod::Recursive, + .hash = narModuloHash, + }; + + StringSource source(sink2.s); + dstStore.addToStore(info, source); + + remappings.insert_or_assign(std::move(path), std::move(info.path)); + } + + return remappings; +} + +} diff --git a/src/libstore/make-content-addressed.hh b/src/libstore/make-content-addressed.hh new file mode 100644 index 000000000..c4a66ed41 --- /dev/null +++ b/src/libstore/make-content-addressed.hh @@ -0,0 +1,12 @@ +#pragma once + +#include "store-api.hh" + +namespace nix { + +std::map<StorePath, StorePath> makeContentAddressed( + Store & srcStore, + Store & dstStore, + const StorePathSet & storePaths); + +} diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index 6409874ff..fb985c97b 100644 --- a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -87,7 +87,7 @@ std::optional<ContentAddress> getDerivationCA(const BasicDerivation & drv) { auto out = drv.outputs.find("out"); if (out != drv.outputs.end()) { - if (auto v = std::get_if<DerivationOutputCAFixed>(&out->second.output)) + if (const auto * v = std::get_if<DerivationOutput::CAFixed>(&out->second.raw())) return v->hash; } return std::nullopt; @@ -277,15 +277,20 @@ std::map<DrvOutput, StorePath> drvOutputReferences( { std::set<Realisation> inputRealisations; - for (const auto& [inputDrv, outputNames] : drv.inputDrvs) { - auto outputHashes = + for (const auto & [inputDrv, outputNames] : drv.inputDrvs) { + const auto outputHashes = staticOutputHashes(store, store.readDerivation(inputDrv)); - for (const auto& outputName : outputNames) { + for (const auto & outputName : outputNames) { + auto outputHash = get(outputHashes, outputName); + if (!outputHash) + throw Error( + "output '%s' of derivation '%s' isn't realised", outputName, + store.printStorePath(inputDrv)); auto thisRealisation = store.queryRealisation( - DrvOutput{outputHashes.at(outputName), outputName}); + DrvOutput{*outputHash, outputName}); if (!thisRealisation) throw Error( - "output '%s' of derivation '%s' isn’t built", outputName, + "output '%s' of derivation '%s' isn't built", outputName, store.printStorePath(inputDrv)); inputRealisations.insert(*thisRealisation); } @@ -295,4 +300,5 @@ std::map<DrvOutput, StorePath> drvOutputReferences( return drvOutputReferences(Realisation::closure(store, inputRealisations), info->references); } + } diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc index 72d41cc94..398147fc3 100644 --- a/src/libstore/nar-accessor.cc +++ b/src/libstore/nar-accessor.cc @@ -75,6 +75,9 @@ struct NarAccessor : public FSAccessor createMember(path, 
{FSAccessor::Type::tRegular, false, 0, 0}); } + void closeRegularFile() override + { } + void isExecutable() override { parents.top()->isExecutable = true; diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index 9dd81ddfb..f4ea739b0 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -62,6 +62,9 @@ public: /* How often to purge expired entries from the cache. */ const int purgeInterval = 24 * 3600; + /* How long to cache binary cache info (i.e. /nix-cache-info) */ + const int cacheInfoTtl = 7 * 24 * 3600; + struct Cache { int id; @@ -98,7 +101,7 @@ public: "insert or replace into BinaryCaches(url, timestamp, storeDir, wantMassQuery, priority) values (?, ?, ?, ?, ?)"); state->queryCache.create(state->db, - "select id, storeDir, wantMassQuery, priority from BinaryCaches where url = ?"); + "select id, storeDir, wantMassQuery, priority from BinaryCaches where url = ? and timestamp > ?"); state->insertNAR.create(state->db, "insert or replace into NARs(cache, hashPart, namePart, url, compression, fileHash, fileSize, narHash, " @@ -183,7 +186,7 @@ public: auto i = state->caches.find(uri); if (i == state->caches.end()) { - auto queryCache(state->queryCache.use()(uri)); + auto queryCache(state->queryCache.use()(uri)(time(0) - cacheInfoTtl)); if (!queryCache.next()) return std::nullopt; state->caches.emplace(uri, diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index 2d75e7a82..071d8355e 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -69,8 +69,6 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & if (value != "unknown-deriver") deriver = StorePath(value); } - else if (name == "System") - system = value; else if (name == "Sig") sigs.insert(value); else if (name == "CA") { @@ -106,9 +104,6 @@ std::string NarInfo::to_string(const Store & store) const if (deriver) res += "Deriver: " + std::string(deriver->to_string()) + "\n"; - if (!system.empty()) - res += "System: " + system + "\n"; - for (auto sig : sigs) res += "Sig: " + sig + "\n"; diff --git a/src/libstore/nar-info.hh b/src/libstore/nar-info.hh index 39ced76e5..01683ec73 100644 --- a/src/libstore/nar-info.hh +++ b/src/libstore/nar-info.hh @@ -14,7 +14,6 @@ struct NarInfo : ValidPathInfo std::string compression; std::optional<Hash> fileHash; uint64_t fileSize = 0; - std::string system; NarInfo() = delete; NarInfo(StorePath && path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { } diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index 8af9b1dde..4d2781180 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -229,7 +229,9 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, } /* Atomically replace the old file with the new hard link. 
*/ - if (rename(tempLink.c_str(), path.c_str()) == -1) { + try { + renameFile(tempLink, path); + } catch (SysError & e) { if (unlink(tempLink.c_str()) == -1) printError("unable to unlink '%1%'", tempLink); if (errno == EMLINK) { @@ -240,7 +242,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, debug("'%s' has reached maximum number of links", linkPath); return; } - throw SysError("cannot rename '%1%' to '%2%'", tempLink, path); + throw; } stats.filesLinked++; diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc index 8c65053e4..f2288a04e 100644 --- a/src/libstore/parsed-derivations.cc +++ b/src/libstore/parsed-derivations.cc @@ -93,7 +93,7 @@ StringSet ParsedDerivation::getRequiredSystemFeatures() const StringSet res; for (auto & i : getStringsAttr("requiredSystemFeatures").value_or(Strings())) res.insert(i); - if (!derivationHasKnownOutputPaths(drv.type())) + if (!drv.type().hasKnownOutputPaths()) res.insert("ca-derivations"); return res; } diff --git a/src/libstore/parsed-derivations.hh b/src/libstore/parsed-derivations.hh index effcf099d..95bec21e8 100644 --- a/src/libstore/parsed-derivations.hh +++ b/src/libstore/parsed-derivations.hh @@ -1,5 +1,6 @@ #pragma once +#include "derivations.hh" #include "store-api.hh" #include <nlohmann/json_fwd.hpp> diff --git a/src/libstore/path-with-outputs.cc b/src/libstore/path-with-outputs.cc index 97aa01b57..d6d67ea05 100644 --- a/src/libstore/path-with-outputs.cc +++ b/src/libstore/path-with-outputs.cc @@ -1,5 +1,8 @@ #include "path-with-outputs.hh" #include "store-api.hh" +#include "nlohmann/json.hpp" + +#include <regex> namespace nix { @@ -22,9 +25,9 @@ DerivedPath StorePathWithOutputs::toDerivedPath() const std::vector<DerivedPath> toDerivedPaths(const std::vector<StorePathWithOutputs> ss) { - std::vector<DerivedPath> reqs; - for (auto & s : ss) reqs.push_back(s.toDerivedPath()); - return reqs; + std::vector<DerivedPath> reqs; + for (auto & s : ss) reqs.push_back(s.toDerivedPath()); + return reqs; } @@ -68,4 +71,57 @@ StorePathWithOutputs followLinksToStorePathWithOutputs(const Store & store, std: return StorePathWithOutputs { store.followLinksToStorePath(path), std::move(outputs) }; } +std::pair<std::string, OutputsSpec> parseOutputsSpec(const std::string & s) +{ + static std::regex regex(R"((.*)\^((\*)|([a-z]+(,[a-z]+)*)))"); + + std::smatch match; + if (!std::regex_match(s, match, regex)) + return {s, DefaultOutputs()}; + + if (match[3].matched) + return {match[1], AllOutputs()}; + + return {match[1], tokenizeString<OutputNames>(match[4].str(), ",")}; +} + +std::string printOutputsSpec(const OutputsSpec & outputsSpec) +{ + if (std::get_if<DefaultOutputs>(&outputsSpec)) + return ""; + + if (std::get_if<AllOutputs>(&outputsSpec)) + return "^*"; + + if (auto outputNames = std::get_if<OutputNames>(&outputsSpec)) + return "^" + concatStringsSep(",", *outputNames); + + assert(false); +} + +void to_json(nlohmann::json & json, const OutputsSpec & outputsSpec) +{ + if (std::get_if<DefaultOutputs>(&outputsSpec)) + json = nullptr; + + else if (std::get_if<AllOutputs>(&outputsSpec)) + json = std::vector<std::string>({"*"}); + + else if (auto outputNames = std::get_if<OutputNames>(&outputsSpec)) + json = *outputNames; +} + +void from_json(const nlohmann::json & json, OutputsSpec & outputsSpec) +{ + if (json.is_null()) + outputsSpec = DefaultOutputs(); + else { + auto names = json.get<OutputNames>(); + if (names == OutputNames({"*"})) + outputsSpec = AllOutputs(); + else + outputsSpec = names; + } +} + 
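/* Illustrative sketch only: how the outputs-spec helpers defined above are
   meant to be used. The installable string is hypothetical, <cassert> is
   assumed to be available, and this function is not part of the patch itself. */
static void outputsSpecUsageSketch()
{
    // "prefix^a,b" splits into the prefix and an explicit set of output names.
    auto [prefix, spec] = parseOutputsSpec("nixpkgs#glibc^dev,static");
    assert(prefix == "nixpkgs#glibc");
    assert(std::get<OutputNames>(spec) == OutputNames({"dev", "static"}));

    // printOutputsSpec() reproduces the '^...' suffix (set order is alphabetical).
    assert(printOutputsSpec(spec) == "^dev,static");

    // Without a '^' suffix the default outputs are selected; '^*' selects all.
    auto def = parseOutputsSpec("nixpkgs#glibc").second;
    assert(std::get_if<DefaultOutputs>(&def));
    auto all = parseOutputsSpec("nixpkgs#glibc^*").second;
    assert(std::get_if<AllOutputs>(&all));
}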
} diff --git a/src/libstore/path-with-outputs.hh b/src/libstore/path-with-outputs.hh index 4c4023dcb..0cb5eb223 100644 --- a/src/libstore/path-with-outputs.hh +++ b/src/libstore/path-with-outputs.hh @@ -4,6 +4,7 @@ #include "path.hh" #include "derived-path.hh" +#include "nlohmann/json_fwd.hpp" namespace nix { @@ -32,4 +33,25 @@ StorePathWithOutputs parsePathWithOutputs(const Store & store, std::string_view StorePathWithOutputs followLinksToStorePathWithOutputs(const Store & store, std::string_view pathWithOutputs); +typedef std::set<std::string> OutputNames; + +struct AllOutputs { + bool operator < (const AllOutputs & _) const { return false; } +}; + +struct DefaultOutputs { + bool operator < (const DefaultOutputs & _) const { return false; } +}; + +typedef std::variant<DefaultOutputs, AllOutputs, OutputNames> OutputsSpec; + +/* Parse a string of the form 'prefix^output1,...outputN' or + 'prefix^*', returning the prefix and the outputs spec. */ +std::pair<std::string, OutputsSpec> parseOutputsSpec(const std::string & s); + +std::string printOutputsSpec(const OutputsSpec & outputsSpec); + +void to_json(nlohmann::json &, const OutputsSpec &); +void from_json(const nlohmann::json &, OutputsSpec &); + } diff --git a/src/libstore/path.cc b/src/libstore/path.cc index e642abcd5..392db225e 100644 --- a/src/libstore/path.cc +++ b/src/libstore/path.cc @@ -1,5 +1,7 @@ #include "store-api.hh" +#include <sodium.h> + namespace nix { static void checkName(std::string_view path, std::string_view name) @@ -41,6 +43,13 @@ bool StorePath::isDerivation() const StorePath StorePath::dummy("ffffffffffffffffffffffffffffffff-x"); +StorePath StorePath::random(std::string_view name) +{ + Hash hash(htSHA1); + randombytes_buf(hash.hash, hash.hashSize); + return StorePath(hash, name); +} + StorePath Store::parseStorePath(std::string_view path) const { auto p = canonPath(std::string(path)); diff --git a/src/libstore/path.hh b/src/libstore/path.hh index e65fee622..77fd0f8dc 100644 --- a/src/libstore/path.hh +++ b/src/libstore/path.hh @@ -58,6 +58,8 @@ public: } static StorePath dummy; + + static StorePath random(std::string_view name); }; typedef std::set<StorePath> StorePathSet; diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index e7ffa6595..96a29155c 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -1,7 +1,9 @@ #include "serialise.hh" #include "util.hh" #include "path-with-outputs.hh" +#include "gc-store.hh" #include "remote-fs-accessor.hh" +#include "build-result.hh" #include "remote-store.hh" #include "worker-protocol.hh" #include "archive.hh" @@ -89,6 +91,35 @@ void write(const Store & store, Sink & out, const DrvOutput & drvOutput) } +BuildResult read(const Store & store, Source & from, Phantom<BuildResult> _) +{ + auto path = worker_proto::read(store, from, Phantom<DerivedPath> {}); + BuildResult res { .path = path }; + res.status = (BuildResult::Status) readInt(from); + from + >> res.errorMsg + >> res.timesBuilt + >> res.isNonDeterministic + >> res.startTime + >> res.stopTime; + res.builtOutputs = worker_proto::read(store, from, Phantom<DrvOutputs> {}); + return res; +} + +void write(const Store & store, Sink & to, const BuildResult & res) +{ + worker_proto::write(store, to, res.path); + to + << res.status + << res.errorMsg + << res.timesBuilt + << res.isNonDeterministic + << res.startTime + << res.stopTime; + worker_proto::write(store, to, res.builtOutputs); +} + + std::optional<StorePath> read(const Store & store, Source & from, 
Phantom<std::optional<StorePath>> _) { auto s = readString(from); @@ -549,7 +580,6 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore( try { conn->to.written = 0; - conn->to.warn = true; connections->incCapacity(); { Finally cleanup([&]() { connections->decCapacity(); }); @@ -560,7 +590,6 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore( dumpString(contents, conn->to); } } - conn->to.warn = false; conn.processStderr(); } catch (SysError & e) { /* Daemon closed while we were sending the path. Probably OOM @@ -643,6 +672,23 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, void RemoteStore::addMultipleToStore( + PathsSource & pathsToCopy, + Activity & act, + RepairFlag repair, + CheckSigsFlag checkSigs) +{ + auto source = sinkToSource([&](Sink & sink) { + sink << pathsToCopy.size(); + for (auto & [pathInfo, pathSource] : pathsToCopy) { + pathInfo.write(sink, *this, 16); + pathSource->drainInto(sink); + } + }); + + addMultipleToStore(*source, repair, checkSigs); +} + +void RemoteStore::addMultipleToStore( Source & source, RepairFlag repair, CheckSigsFlag checkSigs) @@ -687,36 +733,34 @@ void RemoteStore::registerDrvOutput(const Realisation & info) void RemoteStore::queryRealisationUncached(const DrvOutput & id, Callback<std::shared_ptr<const Realisation>> callback) noexcept { - auto conn(getConnection()); + try { + auto conn(getConnection()); - if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 27) { - warn("the daemon is too old to support content-addressed derivations, please upgrade it to 2.4"); - try { - callback(nullptr); - } catch (...) { return callback.rethrow(); } - } + if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 27) { + warn("the daemon is too old to support content-addressed derivations, please upgrade it to 2.4"); + return callback(nullptr); + } - conn->to << wopQueryRealisation; - conn->to << id.to_string(); - conn.processStderr(); + conn->to << wopQueryRealisation; + conn->to << id.to_string(); + conn.processStderr(); - auto real = [&]() -> std::shared_ptr<const Realisation> { - if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 31) { - auto outPaths = worker_proto::read( - *this, conn->from, Phantom<std::set<StorePath>> {}); - if (outPaths.empty()) - return nullptr; - return std::make_shared<const Realisation>(Realisation { .id = id, .outPath = *outPaths.begin() }); - } else { - auto realisations = worker_proto::read( - *this, conn->from, Phantom<std::set<Realisation>> {}); - if (realisations.empty()) - return nullptr; - return std::make_shared<const Realisation>(*realisations.begin()); - } - }(); + auto real = [&]() -> std::shared_ptr<const Realisation> { + if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 31) { + auto outPaths = worker_proto::read( + *this, conn->from, Phantom<std::set<StorePath>> {}); + if (outPaths.empty()) + return nullptr; + return std::make_shared<const Realisation>(Realisation { .id = id, .outPath = *outPaths.begin() }); + } else { + auto realisations = worker_proto::read( + *this, conn->from, Phantom<std::set<Realisation>> {}); + if (realisations.empty()) + return nullptr; + return std::make_shared<const Realisation>(*realisations.begin()); + } + }(); - try { callback(std::shared_ptr<const Realisation>(real)); } catch (...) 
{ return callback.rethrow(); } } @@ -745,17 +789,24 @@ static void writeDerivedPaths(RemoteStore & store, ConnectionHandle & conn, cons } } -void RemoteStore::buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode, std::shared_ptr<Store> evalStore) +void RemoteStore::copyDrvsFromEvalStore( + const std::vector<DerivedPath> & paths, + std::shared_ptr<Store> evalStore) { if (evalStore && evalStore.get() != this) { /* The remote doesn't have a way to access evalStore, so copy the .drvs. */ RealisedPath::Set drvPaths2; - for (auto & i : drvPaths) + for (auto & i : paths) if (auto p = std::get_if<DerivedPath::Built>(&i)) drvPaths2.insert(p->drvPath); copyClosure(*evalStore, *this, drvPaths2); } +} + +void RemoteStore::buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode, std::shared_ptr<Store> evalStore) +{ + copyDrvsFromEvalStore(drvPaths, evalStore); auto conn(getConnection()); conn->to << wopBuildPaths; @@ -772,6 +823,92 @@ void RemoteStore::buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMod readInt(conn->from); } +std::vector<BuildResult> RemoteStore::buildPathsWithResults( + const std::vector<DerivedPath> & paths, + BuildMode buildMode, + std::shared_ptr<Store> evalStore) +{ + copyDrvsFromEvalStore(paths, evalStore); + + std::optional<ConnectionHandle> conn_(getConnection()); + auto & conn = *conn_; + + if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 34) { + conn->to << wopBuildPathsWithResults; + writeDerivedPaths(*this, conn, paths); + conn->to << buildMode; + conn.processStderr(); + return worker_proto::read(*this, conn->from, Phantom<std::vector<BuildResult>> {}); + } else { + // Avoid deadlock. + conn_.reset(); + + // Note: this throws an exception if a build/substitution + // fails, but meh. + buildPaths(paths, buildMode, evalStore); + + std::vector<BuildResult> results; + + for (auto & path : paths) { + std::visit( + overloaded { + [&](const DerivedPath::Opaque & bo) { + results.push_back(BuildResult { + .status = BuildResult::Substituted, + .path = bo, + }); + }, + [&](const DerivedPath::Built & bfd) { + BuildResult res { + .status = BuildResult::Built, + .path = bfd, + }; + + OutputPathMap outputs; + auto drv = evalStore->readDerivation(bfd.drvPath); + const auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive + const auto drvOutputs = drv.outputsAndOptPaths(*this); + for (auto & output : bfd.outputs) { + auto outputHash = get(outputHashes, output); + if (!outputHash) + throw Error( + "the derivation '%s' doesn't have an output named '%s'", + printStorePath(bfd.drvPath), output); + auto outputId = DrvOutput{ *outputHash, output }; + if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) { + auto realisation = + queryRealisation(outputId); + if (!realisation) + throw Error( + "cannot operate on an output of unbuilt " + "content-addressed derivation '%s'", + outputId.to_string()); + res.builtOutputs.emplace(realisation->id, *realisation); + } else { + // If ca-derivations isn't enabled, assume that + // the output path is statically known. 
+ const auto drvOutput = get(drvOutputs, output); + assert(drvOutput); + assert(drvOutput->second); + res.builtOutputs.emplace( + outputId, + Realisation { + .id = outputId, + .outPath = *drvOutput->second, + }); + } + } + + results.push_back(res); + } + }, + path.raw()); + } + + return results; + } +} + BuildResult RemoteStore::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode) @@ -781,7 +918,7 @@ BuildResult RemoteStore::buildDerivation(const StorePath & drvPath, const BasicD writeDerivation(conn->to, *this, drv); conn->to << buildMode; conn.processStderr(); - BuildResult res; + BuildResult res { .path = DerivedPath::Built { .drvPath = drvPath } }; res.status = (BuildResult::Status) readInt(conn->from); conn->from >> res.errorMsg; if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 29) { diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index b94216d31..11d089cd2 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -4,6 +4,8 @@ #include <string> #include "store-api.hh" +#include "gc-store.hh" +#include "log-store.hh" namespace nix { @@ -29,7 +31,10 @@ struct RemoteStoreConfig : virtual StoreConfig /* FIXME: RemoteStore is a misnomer - should be something like DaemonStore. */ -class RemoteStore : public virtual RemoteStoreConfig, public virtual Store +class RemoteStore : public virtual RemoteStoreConfig, + public virtual Store, + public virtual GcStore, + public virtual LogStore { public: @@ -83,6 +88,12 @@ public: RepairFlag repair, CheckSigsFlag checkSigs) override; + void addMultipleToStore( + PathsSource & pathsToCopy, + Activity & act, + RepairFlag repair, + CheckSigsFlag checkSigs) override; + StorePath addTextToStore( std::string_view name, std::string_view s, @@ -96,6 +107,11 @@ public: void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override; + std::vector<BuildResult> buildPathsWithResults( + const std::vector<DerivedPath> & paths, + BuildMode buildMode, + std::shared_ptr<Store> evalStore) override; + BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode) override; @@ -170,6 +186,9 @@ private: std::atomic_bool failed{false}; + void copyDrvsFromEvalStore( + const std::vector<DerivedPath> & paths, + std::shared_ptr<Store> evalStore); }; diff --git a/src/libstore/repair-flag.hh b/src/libstore/repair-flag.hh new file mode 100644 index 000000000..a13cda312 --- /dev/null +++ b/src/libstore/repair-flag.hh @@ -0,0 +1,7 @@ +#pragma once + +namespace nix { + +enum RepairFlag : bool { NoRepair = false, Repair = true }; + +} diff --git a/src/libstore/s3.hh b/src/libstore/s3.hh index 3f55c74db..cdb3e5908 100644 --- a/src/libstore/s3.hh +++ b/src/libstore/s3.hh @@ -5,6 +5,7 @@ #include "ref.hh" #include <optional> +#include <string> namespace Aws { namespace Client { class ClientConfiguration; } } namespace Aws { namespace S3 { class S3Client; } } diff --git a/src/libstore/sandbox-defaults.sb b/src/libstore/sandbox-defaults.sb index 56b35c3fe..d9d710559 100644 --- a/src/libstore/sandbox-defaults.sb +++ b/src/libstore/sandbox-defaults.sb @@ -98,7 +98,9 @@ (allow file* (literal "/private/var/select/sh")) -; Allow Rosetta 2 to run x86_64 binaries on aarch64-darwin. +; Allow Rosetta 2 to run x86_64 binaries on aarch64-darwin (and vice versa). 
(allow file-read* (subpath "/Library/Apple/usr/libexec/oah") - (subpath "/System/Library/Apple/usr/libexec/oah")) + (subpath "/System/Library/Apple/usr/libexec/oah") + (subpath "/System/Library/LaunchDaemons/com.apple.oahd.plist") + (subpath "/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist")) diff --git a/src/libstore/sandbox-network.sb b/src/libstore/sandbox-network.sb index 56beec761..19e9eea9a 100644 --- a/src/libstore/sandbox-network.sb +++ b/src/libstore/sandbox-network.sb @@ -14,3 +14,7 @@ ; Allow DNS lookups. (allow network-outbound (remote unix-socket (path-literal "/private/var/run/mDNSResponder"))) + +; Allow access to trustd. +(allow mach-lookup (global-name "com.apple.trustd")) +(allow mach-lookup (global-name "com.apple.trustd.agent")) diff --git a/src/libstore/schema.sql b/src/libstore/schema.sql index 09c71a2b8..d65e5335e 100644 --- a/src/libstore/schema.sql +++ b/src/libstore/schema.sql @@ -1,7 +1,7 @@ create table if not exists ValidPaths ( id integer primary key autoincrement not null, path text unique not null, - hash text not null, + hash text not null, -- base16 representation registrationTime integer not null, deriver text, narSize integer, diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc index e6ecadd7f..2090beabd 100644 --- a/src/libstore/sqlite.cc +++ b/src/libstore/sqlite.cc @@ -8,22 +8,32 @@ namespace nix { -[[noreturn]] void throwSQLiteError(sqlite3 * db, const FormatOrString & fs) +SQLiteError::SQLiteError(const char *path, int errNo, int extendedErrNo, hintformat && hf) + : Error(""), path(path), errNo(errNo), extendedErrNo(extendedErrNo) +{ + err.msg = hintfmt("%s: %s (in '%s')", + normaltxt(hf.str()), + sqlite3_errstr(extendedErrNo), + path ? path : "(in-memory)"); +} + +[[noreturn]] void SQLiteError::throw_(sqlite3 * db, hintformat && hf) { int err = sqlite3_errcode(db); int exterr = sqlite3_extended_errcode(db); auto path = sqlite3_db_filename(db, nullptr); - if (!path) path = "(in-memory)"; if (err == SQLITE_BUSY || err == SQLITE_PROTOCOL) { - throw SQLiteBusy( + auto exp = SQLiteBusy(path, err, exterr, std::move(hf)); + exp.err.msg = hintfmt( err == SQLITE_PROTOCOL - ? fmt("SQLite database '%s' is busy (SQLITE_PROTOCOL)", path) - : fmt("SQLite database '%s' is busy", path)); - } - else - throw SQLiteError("%s: %s (in '%s')", fs.s, sqlite3_errstr(exterr), path); + ? "SQLite database '%s' is busy (SQLITE_PROTOCOL)" + : "SQLite database '%s' is busy", + path ? path : "(in-memory)"); + throw exp; + } else + throw SQLiteError(path, err, exterr, std::move(hf)); } SQLite::SQLite(const Path & path, bool create) @@ -37,7 +47,7 @@ SQLite::SQLite(const Path & path, bool create) throw Error("cannot open SQLite database '%s'", path); if (sqlite3_busy_timeout(db, 60 * 60 * 1000) != SQLITE_OK) - throwSQLiteError(db, "setting timeout"); + SQLiteError::throw_(db, "setting timeout"); exec("pragma foreign_keys = 1"); } @@ -46,7 +56,7 @@ SQLite::~SQLite() { try { if (db && sqlite3_close(db) != SQLITE_OK) - throwSQLiteError(db, "closing database"); + SQLiteError::throw_(db, "closing database"); } catch (...) 
{ ignoreException(); } @@ -62,7 +72,7 @@ void SQLite::exec(const std::string & stmt) { retrySQLite<void>([&]() { if (sqlite3_exec(db, stmt.c_str(), 0, 0, 0) != SQLITE_OK) - throwSQLiteError(db, format("executing SQLite statement '%s'") % stmt); + SQLiteError::throw_(db, "executing SQLite statement '%s'", stmt); }); } @@ -76,7 +86,7 @@ void SQLiteStmt::create(sqlite3 * db, const std::string & sql) checkInterrupt(); assert(!stmt); if (sqlite3_prepare_v2(db, sql.c_str(), -1, &stmt, 0) != SQLITE_OK) - throwSQLiteError(db, fmt("creating statement '%s'", sql)); + SQLiteError::throw_(db, "creating statement '%s'", sql); this->db = db; this->sql = sql; } @@ -85,7 +95,7 @@ SQLiteStmt::~SQLiteStmt() { try { if (stmt && sqlite3_finalize(stmt) != SQLITE_OK) - throwSQLiteError(db, fmt("finalizing statement '%s'", sql)); + SQLiteError::throw_(db, "finalizing statement '%s'", sql); } catch (...) { ignoreException(); } @@ -109,7 +119,7 @@ SQLiteStmt::Use & SQLiteStmt::Use::operator () (std::string_view value, bool not { if (notNull) { if (sqlite3_bind_text(stmt, curArg++, value.data(), -1, SQLITE_TRANSIENT) != SQLITE_OK) - throwSQLiteError(stmt.db, "binding argument"); + SQLiteError::throw_(stmt.db, "binding argument"); } else bind(); return *this; @@ -119,7 +129,7 @@ SQLiteStmt::Use & SQLiteStmt::Use::operator () (const unsigned char * data, size { if (notNull) { if (sqlite3_bind_blob(stmt, curArg++, data, len, SQLITE_TRANSIENT) != SQLITE_OK) - throwSQLiteError(stmt.db, "binding argument"); + SQLiteError::throw_(stmt.db, "binding argument"); } else bind(); return *this; @@ -129,7 +139,7 @@ SQLiteStmt::Use & SQLiteStmt::Use::operator () (int64_t value, bool notNull) { if (notNull) { if (sqlite3_bind_int64(stmt, curArg++, value) != SQLITE_OK) - throwSQLiteError(stmt.db, "binding argument"); + SQLiteError::throw_(stmt.db, "binding argument"); } else bind(); return *this; @@ -138,7 +148,7 @@ SQLiteStmt::Use & SQLiteStmt::Use::operator () (int64_t value, bool notNull) SQLiteStmt::Use & SQLiteStmt::Use::bind() { if (sqlite3_bind_null(stmt, curArg++) != SQLITE_OK) - throwSQLiteError(stmt.db, "binding argument"); + SQLiteError::throw_(stmt.db, "binding argument"); return *this; } @@ -152,14 +162,14 @@ void SQLiteStmt::Use::exec() int r = step(); assert(r != SQLITE_ROW); if (r != SQLITE_DONE) - throwSQLiteError(stmt.db, fmt("executing SQLite statement '%s'", sqlite3_expanded_sql(stmt.stmt))); + SQLiteError::throw_(stmt.db, fmt("executing SQLite statement '%s'", sqlite3_expanded_sql(stmt.stmt))); } bool SQLiteStmt::Use::next() { int r = step(); if (r != SQLITE_DONE && r != SQLITE_ROW) - throwSQLiteError(stmt.db, fmt("executing SQLite query '%s'", sqlite3_expanded_sql(stmt.stmt))); + SQLiteError::throw_(stmt.db, fmt("executing SQLite query '%s'", sqlite3_expanded_sql(stmt.stmt))); return r == SQLITE_ROW; } @@ -185,14 +195,14 @@ SQLiteTxn::SQLiteTxn(sqlite3 * db) { this->db = db; if (sqlite3_exec(db, "begin;", 0, 0, 0) != SQLITE_OK) - throwSQLiteError(db, "starting transaction"); + SQLiteError::throw_(db, "starting transaction"); active = true; } void SQLiteTxn::commit() { if (sqlite3_exec(db, "commit;", 0, 0, 0) != SQLITE_OK) - throwSQLiteError(db, "committing transaction"); + SQLiteError::throw_(db, "committing transaction"); active = false; } @@ -200,7 +210,7 @@ SQLiteTxn::~SQLiteTxn() { try { if (active && sqlite3_exec(db, "rollback;", 0, 0, 0) != SQLITE_OK) - throwSQLiteError(db, "aborting transaction"); + SQLiteError::throw_(db, "aborting transaction"); } catch (...) 
{ ignoreException(); } @@ -215,7 +225,6 @@ void handleSQLiteBusy(const SQLiteBusy & e) if (now > lastWarned + 10) { lastWarned = now; logWarning({ - .name = "Sqlite busy", .msg = hintfmt(e.what()) }); } diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh index 99f0d56ce..1d1c553ea 100644 --- a/src/libstore/sqlite.hh +++ b/src/libstore/sqlite.hh @@ -96,10 +96,30 @@ struct SQLiteTxn }; -MakeError(SQLiteError, Error); -MakeError(SQLiteBusy, SQLiteError); +struct SQLiteError : Error +{ + const char *path; + int errNo, extendedErrNo; + + template<typename... Args> + [[noreturn]] static void throw_(sqlite3 * db, const std::string & fs, const Args & ... args) { + throw_(db, hintfmt(fs, args...)); + } + + SQLiteError(const char *path, int errNo, int extendedErrNo, hintformat && hf); + +protected: -[[noreturn]] void throwSQLiteError(sqlite3 * db, const FormatOrString & fs); + template<typename... Args> + SQLiteError(const char *path, int errNo, int extendedErrNo, const std::string & fs, const Args & ... args) + : SQLiteError(path, errNo, extendedErrNo, hintfmt(fs, args...)) + { } + + [[noreturn]] static void throw_(sqlite3 * db, hintformat && hf); + +}; + +MakeError(SQLiteBusy, SQLiteError); void handleSQLiteBusy(const SQLiteBusy & e); diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index bb03daef4..62daa838c 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -52,6 +52,10 @@ public: bool sameMachine() override { return false; } + // FIXME extend daemon protocol, move implementation to RemoteStore + std::optional<std::string> getBuildLog(const StorePath & path) override + { unsupported("getBuildLog"); } + private: struct Connection : RemoteStore::Connection diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index 1bbad71f2..69bfe3418 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -67,7 +67,7 @@ std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string if (fakeSSH) { args = { "bash", "-c" }; } else { - args = { "ssh", host.c_str(), "-x", "-a" }; + args = { "ssh", host.c_str(), "-x" }; addCommonSSHOpts(args); if (socketPath != "") args.insert(args.end(), {"-S", socketPath}); diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 86fa6a211..06a9758fc 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -1,6 +1,7 @@ #include "crypto.hh" #include "fs-accessor.hh" #include "globals.hh" +#include "derivations.hh" #include "store-api.hh" #include "util.hh" #include "nar-info-disk-cache.hh" @@ -257,6 +258,84 @@ StorePath Store::addToStore( return addToStoreFromDump(*source, name, method, hashAlgo, repair, references); } +void Store::addMultipleToStore( + PathsSource & pathsToCopy, + Activity & act, + RepairFlag repair, + CheckSigsFlag checkSigs) +{ + std::atomic<size_t> nrDone{0}; + std::atomic<size_t> nrFailed{0}; + std::atomic<uint64_t> bytesExpected{0}; + std::atomic<uint64_t> nrRunning{0}; + + using PathWithInfo = std::pair<ValidPathInfo, std::unique_ptr<Source>>; + + std::map<StorePath, PathWithInfo *> infosMap; + StorePathSet storePathsToAdd; + for (auto & thingToAdd : pathsToCopy) { + infosMap.insert_or_assign(thingToAdd.first.path, &thingToAdd); + storePathsToAdd.insert(thingToAdd.first.path); + } + + auto showProgress = [&]() { + act.progress(nrDone, pathsToCopy.size(), nrRunning, nrFailed); + }; + + ThreadPool pool; + + processGraph<StorePath>(pool, + storePathsToAdd, + + [&](const StorePath & path) { + + auto & [info, _] = *infosMap.at(path); + + if 
(isValidPath(info.path)) { + nrDone++; + showProgress(); + return StorePathSet(); + } + + bytesExpected += info.narSize; + act.setExpected(actCopyPath, bytesExpected); + + return info.references; + }, + + [&](const StorePath & path) { + checkInterrupt(); + + auto & [info_, source_] = *infosMap.at(path); + auto info = info_; + info.ultimate = false; + + /* Make sure that the Source object is destroyed when + we're done. In particular, a SinkToSource object must + be destroyed to ensure that the destructors on its + stack frame are run; this includes + LegacySSHStore::narFromPath()'s connection lock. */ + auto source = std::move(source_); + + if (!isValidPath(info.path)) { + MaintainCount<decltype(nrRunning)> mc(nrRunning); + showProgress(); + try { + addToStore(info, *source, repair, checkSigs); + } catch (Error & e) { + nrFailed++; + if (!settings.keepGoing) + throw e; + printMsg(lvlError, "could not copy %s: %s", printStorePath(path), e.what()); + showProgress(); + return; + } + } + + nrDone++; + showProgress(); + }); +} void Store::addMultipleToStore( Source & source, @@ -991,113 +1070,61 @@ std::map<StorePath, StorePath> copyPaths( for (auto & path : storePaths) if (!valid.count(path)) missing.insert(path); + Activity act(*logger, lvlInfo, actCopyPaths, fmt("copying %d paths", missing.size())); + + // In the general case, `addMultipleToStore` requires a sorted list of + // store paths to add, so sort them right now + auto sortedMissing = srcStore.topoSortPaths(missing); + std::reverse(sortedMissing.begin(), sortedMissing.end()); + std::map<StorePath, StorePath> pathsMap; for (auto & path : storePaths) pathsMap.insert_or_assign(path, path); - Activity act(*logger, lvlInfo, actCopyPaths, fmt("copying %d paths", missing.size())); + Store::PathsSource pathsToCopy; + + auto computeStorePathForDst = [&](const ValidPathInfo & currentPathInfo) -> StorePath { + auto storePathForSrc = currentPathInfo.path; + auto storePathForDst = storePathForSrc; + if (currentPathInfo.ca && currentPathInfo.references.empty()) { + storePathForDst = dstStore.makeFixedOutputPathFromCA(storePathForSrc.name(), *currentPathInfo.ca); + if (dstStore.storeDir == srcStore.storeDir) + assert(storePathForDst == storePathForSrc); + if (storePathForDst != storePathForSrc) + debug("replaced path '%s' to '%s' for substituter '%s'", + srcStore.printStorePath(storePathForSrc), + dstStore.printStorePath(storePathForDst), + dstStore.getUri()); + } + return storePathForDst; + }; - auto sorted = srcStore.topoSortPaths(missing); - std::reverse(sorted.begin(), sorted.end()); + for (auto & missingPath : sortedMissing) { + auto info = srcStore.queryPathInfo(missingPath); - auto source = sinkToSource([&](Sink & sink) { - sink << sorted.size(); - for (auto & storePath : sorted) { + auto storePathForDst = computeStorePathForDst(*info); + pathsMap.insert_or_assign(missingPath, storePathForDst); + + ValidPathInfo infoForDst = *info; + infoForDst.path = storePathForDst; + + auto source = sinkToSource([&](Sink & sink) { + // We can reasonably assume that the copy will happen whenever we + // read the path, so log something about that at that point auto srcUri = srcStore.getUri(); auto dstUri = dstStore.getUri(); - auto storePathS = srcStore.printStorePath(storePath); + auto storePathS = srcStore.printStorePath(missingPath); Activity act(*logger, lvlInfo, actCopyPath, makeCopyPathMessage(srcUri, dstUri, storePathS), {storePathS, srcUri, dstUri}); PushActivity pact(act.id); - auto info = srcStore.queryPathInfo(storePath); - info->write(sink, 
srcStore, 16); - srcStore.narFromPath(storePath, sink); - } - }); - - dstStore.addMultipleToStore(*source, repair, checkSigs); - - #if 0 - std::atomic<size_t> nrDone{0}; - std::atomic<size_t> nrFailed{0}; - std::atomic<uint64_t> bytesExpected{0}; - std::atomic<uint64_t> nrRunning{0}; - - auto showProgress = [&]() { - act.progress(nrDone, missing.size(), nrRunning, nrFailed); - }; - - ThreadPool pool; - - processGraph<StorePath>(pool, - StorePathSet(missing.begin(), missing.end()), - - [&](const StorePath & storePath) { - auto info = srcStore.queryPathInfo(storePath); - auto storePathForDst = storePath; - if (info->ca && info->references.empty()) { - storePathForDst = dstStore.makeFixedOutputPathFromCA(storePath.name(), *info->ca); - if (dstStore.storeDir == srcStore.storeDir) - assert(storePathForDst == storePath); - if (storePathForDst != storePath) - debug("replaced path '%s' to '%s' for substituter '%s'", - srcStore.printStorePath(storePath), - dstStore.printStorePath(storePathForDst), - dstStore.getUri()); - } - pathsMap.insert_or_assign(storePath, storePathForDst); - - if (dstStore.isValidPath(storePath)) { - nrDone++; - showProgress(); - return StorePathSet(); - } - - bytesExpected += info->narSize; - act.setExpected(actCopyPath, bytesExpected); - - return info->references; - }, - - [&](const StorePath & storePath) { - checkInterrupt(); - - auto info = srcStore.queryPathInfo(storePath); - - auto storePathForDst = storePath; - if (info->ca && info->references.empty()) { - storePathForDst = dstStore.makeFixedOutputPathFromCA(storePath.name(), *info->ca); - if (dstStore.storeDir == srcStore.storeDir) - assert(storePathForDst == storePath); - if (storePathForDst != storePath) - debug("replaced path '%s' to '%s' for substituter '%s'", - srcStore.printStorePath(storePath), - dstStore.printStorePath(storePathForDst), - dstStore.getUri()); - } - pathsMap.insert_or_assign(storePath, storePathForDst); - - if (!dstStore.isValidPath(storePathForDst)) { - MaintainCount<decltype(nrRunning)> mc(nrRunning); - showProgress(); - try { - copyStorePath(srcStore, dstStore, storePath, repair, checkSigs); - } catch (Error &e) { - nrFailed++; - if (!settings.keepGoing) - throw e; - printMsg(lvlError, "could not copy %s: %s", dstStore.printStorePath(storePath), e.what()); - showProgress(); - return; - } - } - - nrDone++; - showProgress(); + srcStore.narFromPath(missingPath, sink); }); - #endif + pathsToCopy.push_back(std::pair{infoForDst, std::move(source)}); + } + + dstStore.addMultipleToStore(pathsToCopy, act, repair, checkSigs); return pathsMap; } @@ -1301,7 +1328,8 @@ std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri_ return {uri, params}; } -static bool isNonUriPath(const std::string & spec) { +static bool isNonUriPath(const std::string & spec) +{ return // is not a URL spec.find("://") == std::string::npos @@ -1313,11 +1341,36 @@ static bool isNonUriPath(const std::string & spec) { std::shared_ptr<Store> openFromNonUri(const std::string & uri, const Store::Params & params) { if (uri == "" || uri == "auto") { - auto stateDir = get(params, "state").value_or(settings.nixStateDir); + auto stateDir = getOr(params, "state", settings.nixStateDir); if (access(stateDir.c_str(), R_OK | W_OK) == 0) return std::make_shared<LocalStore>(params); else if (pathExists(settings.nixDaemonSocketFile)) return std::make_shared<UDSRemoteStore>(params); + #if __linux__ + else if (!pathExists(stateDir) + && params.empty() + && getuid() != 0 + && !getEnv("NIX_STORE_DIR").has_value() + && 
!getEnv("NIX_STATE_DIR").has_value()) + { + /* If /nix doesn't exist, there is no daemon socket, and + we're not root, then automatically set up a chroot + store in ~/.local/share/nix/root. */ + auto chrootStore = getDataDir() + "/nix/root"; + if (!pathExists(chrootStore)) { + try { + createDirs(chrootStore); + } catch (Error & e) { + return std::make_shared<LocalStore>(params); + } + warn("'%s' does not exist, so Nix will use '%s' as a chroot store", stateDir, chrootStore); + } else + debug("'%s' does not exist, so Nix will use '%s' as a chroot store", stateDir, chrootStore); + Store::Params params2; + params2["root"] = chrootStore; + return std::make_shared<LocalStore>(params2); + } + #endif else return std::make_shared<LocalStore>(params); } else if (uri == "daemon") { diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index e4fb1f1fd..c8a667c6d 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -1,5 +1,6 @@ #pragma once +#include "nar-info.hh" #include "realisation.hh" #include "path.hh" #include "derived-path.hh" @@ -10,8 +11,8 @@ #include "sync.hh" #include "globals.hh" #include "config.hh" -#include "derivations.hh" #include "path-info.hh" +#include "repair-flag.hh" #include <atomic> #include <limits> @@ -62,6 +63,8 @@ MakeError(BadStorePath, Error); MakeError(InvalidStoreURI, Error); +struct BasicDerivation; +struct Derivation; class FSAccessor; class NarInfoDiskCache; class Store; @@ -76,127 +79,10 @@ enum AllowInvalidFlag : bool { DisallowInvalid = false, AllowInvalid = true }; const uint32_t exportMagic = 0x4558494e; -typedef std::unordered_map<StorePath, std::unordered_set<std::string>> Roots; - - -struct GCOptions -{ - /* Garbage collector operation: - - - `gcReturnLive': return the set of paths reachable from - (i.e. in the closure of) the roots. - - - `gcReturnDead': return the set of paths not reachable from - the roots. - - - `gcDeleteDead': actually delete the latter set. - - - `gcDeleteSpecific': delete the paths listed in - `pathsToDelete', insofar as they are not reachable. - */ - typedef enum { - gcReturnLive, - gcReturnDead, - gcDeleteDead, - gcDeleteSpecific, - } GCAction; - - GCAction action{gcDeleteDead}; - - /* If `ignoreLiveness' is set, then reachability from the roots is - ignored (dangerous!). However, the paths must still be - unreferenced *within* the store (i.e., there can be no other - store paths that depend on them). */ - bool ignoreLiveness{false}; - - /* For `gcDeleteSpecific', the paths to delete. */ - StorePathSet pathsToDelete; - - /* Stop after at least `maxFreed' bytes have been freed. */ - uint64_t maxFreed{std::numeric_limits<uint64_t>::max()}; -}; - - -struct GCResults -{ - /* Depending on the action, the GC roots, or the paths that would - be or have been deleted. */ - PathSet paths; - - /* For `gcReturnDead', `gcDeleteDead' and `gcDeleteSpecific', the - number of bytes that would be or was freed. */ - uint64_t bytesFreed = 0; -}; - - enum BuildMode { bmNormal, bmRepair, bmCheck }; +struct BuildResult; -struct BuildResult -{ - /* Note: don't remove status codes, and only add new status codes - at the end of the list, to prevent client/server - incompatibilities in the nix-store --serve protocol. 
*/ - enum Status { - Built = 0, - Substituted, - AlreadyValid, - PermanentFailure, - InputRejected, - OutputRejected, - TransientFailure, // possibly transient - CachedFailure, // no longer used - TimedOut, - MiscFailure, - DependencyFailed, - LogLimitExceeded, - NotDeterministic, - ResolvesToAlreadyValid, - } status = MiscFailure; - std::string errorMsg; - - std::string toString() const { - auto strStatus = [&]() { - switch (status) { - case Built: return "Built"; - case Substituted: return "Substituted"; - case AlreadyValid: return "AlreadyValid"; - case PermanentFailure: return "PermanentFailure"; - case InputRejected: return "InputRejected"; - case OutputRejected: return "OutputRejected"; - case TransientFailure: return "TransientFailure"; - case CachedFailure: return "CachedFailure"; - case TimedOut: return "TimedOut"; - case MiscFailure: return "MiscFailure"; - case DependencyFailed: return "DependencyFailed"; - case LogLimitExceeded: return "LogLimitExceeded"; - case NotDeterministic: return "NotDeterministic"; - case ResolvesToAlreadyValid: return "ResolvesToAlreadyValid"; - default: return "Unknown"; - }; - }(); - return strStatus + ((errorMsg == "") ? "" : " : " + errorMsg); - } - - /* How many times this build was performed. */ - unsigned int timesBuilt = 0; - - /* If timesBuilt > 1, whether some builds did not produce the same - result. (Note that 'isNonDeterministic = false' does not mean - the build is deterministic, just that we don't have evidence of - non-determinism.) */ - bool isNonDeterministic = false; - - DrvOutputs builtOutputs; - - /* The start/stop times of the build (or one of the rounds, if it - was repeated). */ - time_t startTime = 0, stopTime = 0; - - bool success() { - return status == Built || status == Substituted || status == AlreadyValid || status == ResolvesToAlreadyValid; - } -}; struct StoreConfig : public Config { @@ -474,12 +360,22 @@ public: virtual void addToStore(const ValidPathInfo & info, Source & narSource, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs) = 0; + // A list of paths infos along with a source providing the content of the + // associated store path + using PathsSource = std::vector<std::pair<ValidPathInfo, std::unique_ptr<Source>>>; + /* Import multiple paths into the store. */ virtual void addMultipleToStore( Source & source, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs); + virtual void addMultipleToStore( + PathsSource & pathsToCopy, + Activity & act, + RepairFlag repair = NoRepair, + CheckSigsFlag checkSigs = CheckSigs); + /* Copy the contents of a path to the store and register the validity the resulting path. The resulting path is returned. The function object `filter' can be used to exclude files (see @@ -549,6 +445,16 @@ public: BuildMode buildMode = bmNormal, std::shared_ptr<Store> evalStore = nullptr); + /* Like `buildPaths()`, but return a vector of `BuildResult`s + corresponding to each element in `paths`. Note that in case of + a build/substitution error, this function won't throw an + exception, but return a `BuildResult` containing an error + message. */ + virtual std::vector<BuildResult> buildPathsWithResults( + const std::vector<DerivedPath> & paths, + BuildMode buildMode = bmNormal, + std::shared_ptr<Store> evalStore = nullptr); + /* Build a single non-materialized derivation (i.e. not from an on-disk .drv file). 
@@ -595,26 +501,6 @@ public: virtual void addTempRoot(const StorePath & path) { debug("not creating temporary root, store doesn't support GC"); } - /* Add an indirect root, which is merely a symlink to `path' from - /nix/var/nix/gcroots/auto/<hash of `path'>. `path' is supposed - to be a symlink to a store path. The garbage collector will - automatically remove the indirect root when it finds that - `path' has disappeared. */ - virtual void addIndirectRoot(const Path & path) - { unsupported("addIndirectRoot"); } - - /* Find the roots of the garbage collector. Each root is a pair - (link, storepath) where `link' is the path of the symlink - outside of the Nix store that point to `storePath'. If - 'censor' is true, privacy-sensitive information about roots - found in /proc is censored. */ - virtual Roots findRoots(bool censor) - { unsupported("findRoots"); } - - /* Perform a garbage collection. */ - virtual void collectGarbage(const GCOptions & options, GCResults & results) - { unsupported("collectGarbage"); } - /* Return a string representing information about the path that can be loaded into the database using `nix-store --load-db' or `nix-store --register-validity'. */ @@ -732,14 +618,6 @@ public: */ StorePathSet exportReferences(const StorePathSet & storePaths, const StorePathSet & inputPaths); - /* Return the build log of the specified store path, if available, - or null otherwise. */ - virtual std::optional<std::string> getBuildLog(const StorePath & path) - { return std::nullopt; } - - virtual void addBuildLog(const StorePath & path, std::string_view log) - { unsupported("addBuildLog"); } - /* Hack to allow long-running processes like hydra-queue-runner to occasionally flush their path info cache. */ void clearPathInfoCache() diff --git a/src/libstore/store-cast.hh b/src/libstore/store-cast.hh new file mode 100644 index 000000000..ff62fc359 --- /dev/null +++ b/src/libstore/store-cast.hh @@ -0,0 +1,16 @@ +#pragma once + +#include "store-api.hh" + +namespace nix { + +template<typename T> +T & require(Store & store) +{ + auto * castedStore = dynamic_cast<T *>(&store); + if (!castedStore) + throw UsageError("%s not supported by store '%s'", T::operationName, store.getUri()); + return *castedStore; +} + +} diff --git a/src/libstore/tests/path-with-outputs.cc b/src/libstore/tests/path-with-outputs.cc new file mode 100644 index 000000000..350ea7ffd --- /dev/null +++ b/src/libstore/tests/path-with-outputs.cc @@ -0,0 +1,46 @@ +#include "path-with-outputs.hh" + +#include <gtest/gtest.h> + +namespace nix { + +TEST(parseOutputsSpec, basic) +{ + { + auto [prefix, outputsSpec] = parseOutputsSpec("foo"); + ASSERT_EQ(prefix, "foo"); + ASSERT_TRUE(std::get_if<DefaultOutputs>(&outputsSpec)); + } + + { + auto [prefix, outputsSpec] = parseOutputsSpec("foo^*"); + ASSERT_EQ(prefix, "foo"); + ASSERT_TRUE(std::get_if<AllOutputs>(&outputsSpec)); + } + + { + auto [prefix, outputsSpec] = parseOutputsSpec("foo^out"); + ASSERT_EQ(prefix, "foo"); + ASSERT_TRUE(std::get<OutputNames>(outputsSpec) == OutputNames({"out"})); + } + + { + auto [prefix, outputsSpec] = parseOutputsSpec("foo^out,bin"); + ASSERT_EQ(prefix, "foo"); + ASSERT_TRUE(std::get<OutputNames>(outputsSpec) == OutputNames({"out", "bin"})); + } + + { + auto [prefix, outputsSpec] = parseOutputsSpec("foo^bar^out,bin"); + ASSERT_EQ(prefix, "foo^bar"); + ASSERT_TRUE(std::get<OutputNames>(outputsSpec) == OutputNames({"out", "bin"})); + } + + { + auto [prefix, outputsSpec] = parseOutputsSpec("foo^&*()"); + ASSERT_EQ(prefix, "foo^&*()"); + 
ASSERT_TRUE(std::get_if<DefaultOutputs>(&outputsSpec)); + } +} + +} diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index c8332afe6..87088a3ac 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -9,7 +9,7 @@ namespace nix { #define WORKER_MAGIC_1 0x6e697863 #define WORKER_MAGIC_2 0x6478696f -#define PROTOCOL_VERSION (1 << 8 | 33) +#define PROTOCOL_VERSION (1 << 8 | 34) #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) @@ -57,6 +57,7 @@ typedef enum { wopQueryRealisation = 43, wopAddMultipleToStore = 44, wopAddBuildLog = 45, + wopBuildPathsWithResults = 46, } WorkerOp; @@ -91,6 +92,7 @@ MAKE_WORKER_PROTO(, ContentAddress); MAKE_WORKER_PROTO(, DerivedPath); MAKE_WORKER_PROTO(, Realisation); MAKE_WORKER_PROTO(, DrvOutput); +MAKE_WORKER_PROTO(, BuildResult); MAKE_WORKER_PROTO(template<typename T>, std::vector<T>); MAKE_WORKER_PROTO(template<typename T>, std::set<T>); diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index eda004756..4b0636129 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -64,11 +64,12 @@ static void dumpContents(const Path & path, off_t size, } -static void dump(const Path & path, Sink & sink, PathFilter & filter) +static time_t dump(const Path & path, Sink & sink, PathFilter & filter) { checkInterrupt(); auto st = lstat(path); + time_t result = st.st_mtime; sink << "("; @@ -103,7 +104,10 @@ static void dump(const Path & path, Sink & sink, PathFilter & filter) for (auto & i : unhacked) if (filter(path + "/" + i.first)) { sink << "entry" << "(" << "name" << i.first << "node"; - dump(path + "/" + i.second, sink, filter); + auto tmp_mtime = dump(path + "/" + i.second, sink, filter); + if (tmp_mtime > result) { + result = tmp_mtime; + } sink << ")"; } } @@ -114,13 +118,20 @@ static void dump(const Path & path, Sink & sink, PathFilter & filter) else throw Error("file '%1%' has an unsupported type", path); sink << ")"; + + return result; } -void dumpPath(const Path & path, Sink & sink, PathFilter & filter) +time_t dumpPathAndGetMtime(const Path & path, Sink & sink, PathFilter & filter) { sink << narVersionMagic1; - dump(path, sink, filter); + return dump(path, sink, filter); +} + +void dumpPath(const Path & path, Sink & sink, PathFilter & filter) +{ + dumpPathAndGetMtime(path, sink, filter); } @@ -223,6 +234,7 @@ static void parse(ParseSink & sink, Source & source, const Path & path) else if (s == "contents" && type == tpRegular) { parseContents(sink, source, path); + sink.closeRegularFile(); } else if (s == "executable" && type == tpRegular) { @@ -313,6 +325,12 @@ struct RestoreSink : ParseSink if (!fd) throw SysError("creating file '%1%'", p); } + void closeRegularFile() override + { + /* Call close explicitly to make sure the error is checked */ + fd.close(); + } + void isExecutable() override { struct stat st; diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh index fca351605..ac4183bf5 100644 --- a/src/libutil/archive.hh +++ b/src/libutil/archive.hh @@ -48,6 +48,10 @@ namespace nix { void dumpPath(const Path & path, Sink & sink, PathFilter & filter = defaultPathFilter); +/* Same as `void dumpPath()`, but returns the last modified date of the path */ +time_t dumpPathAndGetMtime(const Path & path, Sink & sink, + PathFilter & filter = defaultPathFilter); + void dumpString(std::string_view s, Sink & sink); /* FIXME: fix this API, it sucks. 
*/ @@ -56,6 +60,7 @@ struct ParseSink virtual void createDirectory(const Path & path) { }; virtual void createRegularFile(const Path & path) { }; + virtual void closeRegularFile() { }; virtual void isExecutable() { }; virtual void preallocateContents(uint64_t size) { }; virtual void receiveContents(std::string_view data) { }; diff --git a/src/libutil/args.cc b/src/libutil/args.cc index f970c0e9e..753980fd4 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -124,14 +124,14 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end) bool anyCompleted = false; for (size_t n = 0 ; n < flag.handler.arity; ++n) { if (pos == end) { - if (flag.handler.arity == ArityAny) break; + if (flag.handler.arity == ArityAny || anyCompleted) break; throw UsageError("flag '%s' requires %d argument(s)", name, flag.handler.arity); } - if (flag.completer) - if (auto prefix = needsCompletion(*pos)) { - anyCompleted = true; + if (auto prefix = needsCompletion(*pos)) { + anyCompleted = true; + if (flag.completer) flag.completer(n, *prefix); - } + } args.push_back(*pos++); } if (!anyCompleted) @@ -146,6 +146,7 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end) && hasPrefix(name, std::string(*prefix, 2))) completions->add("--" + name, flag->description); } + return false; } auto i = longFlags.find(std::string(*pos, 2)); if (i == longFlags.end()) return false; @@ -187,10 +188,12 @@ bool Args::processArgs(const Strings & args, bool finish) { std::vector<std::string> ss; for (const auto &[n, s] : enumerate(args)) { - ss.push_back(s); - if (exp.completer) - if (auto prefix = needsCompletion(s)) + if (auto prefix = needsCompletion(s)) { + ss.push_back(*prefix); + if (exp.completer) exp.completer(n, *prefix); + } else + ss.push_back(s); } exp.handler.fun(ss); expectedArgs.pop_front(); @@ -213,7 +216,7 @@ nlohmann::json Args::toJSON() if (flag->shortName) j["shortName"] = std::string(1, flag->shortName); if (flag->description != "") - j["description"] = flag->description; + j["description"] = trim(flag->description); j["category"] = flag->category; if (flag->handler.arity != ArityAny) j["arity"] = flag->handler.arity; @@ -234,7 +237,7 @@ nlohmann::json Args::toJSON() } auto res = nlohmann::json::object(); - res["description"] = description(); + res["description"] = trim(description()); res["flags"] = std::move(flags); res["args"] = std::move(args); auto s = doc(); @@ -279,21 +282,22 @@ static void _completePath(std::string_view prefix, bool onlyDirs) { completionType = ctFilenames; glob_t globbuf; - int flags = GLOB_NOESCAPE | GLOB_TILDE; + int flags = GLOB_NOESCAPE; #ifdef GLOB_ONLYDIR if (onlyDirs) flags |= GLOB_ONLYDIR; #endif - if (glob((std::string(prefix) + "*").c_str(), flags, nullptr, &globbuf) == 0) { + // using expandTilde here instead of GLOB_TILDE(_CHECK) so that ~<Tab> expands to /home/user/ + if (glob((expandTilde(prefix) + "*").c_str(), flags, nullptr, &globbuf) == 0) { for (size_t i = 0; i < globbuf.gl_pathc; ++i) { if (onlyDirs) { - auto st = lstat(globbuf.gl_pathv[i]); + auto st = stat(globbuf.gl_pathv[i]); if (!S_ISDIR(st.st_mode)) continue; } completions->add(globbuf.gl_pathv[i]); } - globfree(&globbuf); } + globfree(&globbuf); } void completePath(size_t, std::string_view prefix) @@ -322,16 +326,21 @@ MultiCommand::MultiCommand(const Commands & commands_) .optional = true, .handler = {[=](std::string s) { assert(!command); - if (auto prefix = needsCompletion(s)) { - for (auto & [name, command] : commands) - if (hasPrefix(name, *prefix)) - 
completions->add(name); - } auto i = commands.find(s); - if (i == commands.end()) - throw UsageError("'%s' is not a recognised command", s); + if (i == commands.end()) { + std::set<std::string> commandNames; + for (auto & [name, _] : commands) + commandNames.insert(name); + auto suggestions = Suggestions::bestMatches(commandNames, s); + throw UsageError(suggestions, "'%s' is not a recognised command", s); + } command = {s, i->second()}; command->second->parent = this; + }}, + .completer = {[&](size_t, std::string_view prefix) { + for (auto & [name, command] : commands) + if (hasPrefix(name, prefix)) + completions->add(name); }} }); @@ -353,6 +362,14 @@ bool MultiCommand::processArgs(const Strings & args, bool finish) return Args::processArgs(args, finish); } +void MultiCommand::completionHook() +{ + if (command) + return command->second->completionHook(); + else + return Args::completionHook(); +} + nlohmann::json MultiCommand::toJSON() { auto cmds = nlohmann::json::object(); @@ -362,7 +379,7 @@ nlohmann::json MultiCommand::toJSON() auto j = command->toJSON(); auto cat = nlohmann::json::object(); cat["id"] = command->category(); - cat["description"] = categories[command->category()]; + cat["description"] = trim(categories[command->category()]); j["category"] = std::move(cat); cmds[name] = std::move(j); } diff --git a/src/libutil/args.hh b/src/libutil/args.hh index fdd036f9a..84866f12b 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -25,6 +25,8 @@ public: /* Return a short one-line description of the command. */ virtual std::string description() { return ""; } + virtual bool forceImpureByDefault() { return false; } + /* Return documentation about this command, in Markdown format. */ virtual std::string doc() { return ""; } @@ -146,6 +148,11 @@ protected: argument (if any) have been processed. */ virtual void initialFlagsProcessed() {} + /* Called after the command line has been processed if we need to generate + completions. Useful for commands that need to know the whole command line + in order to know what completions to generate. */ + virtual void completionHook() { } + public: void addFlag(Flag && flag); @@ -221,6 +228,8 @@ public: bool processArgs(const Strings & args, bool finish) override; + void completionHook() override; + nlohmann::json toJSON() override; }; diff --git a/src/libutil/chunked-vector.hh b/src/libutil/chunked-vector.hh new file mode 100644 index 000000000..0a4f0b400 --- /dev/null +++ b/src/libutil/chunked-vector.hh @@ -0,0 +1,68 @@ +#pragma once + +#include <cstdint> +#include <cstdlib> +#include <vector> +#include <limits> + +namespace nix { + +/* Provides an indexable container like vector<> with memory overhead + guarantees like list<> by allocating storage in chunks of ChunkSize + elements instead of using a contiguous memory allocation like vector<> + does. Not using a single vector that is resized reduces memory overhead + on large data sets by on average (growth factor)/2, mostly + eliminates copies within the vector during resizing, and provides stable + references to its elements. 
*/ +template<typename T, size_t ChunkSize> +class ChunkedVector { +private: + uint32_t size_ = 0; + std::vector<std::vector<T>> chunks; + + /* keep this out of the ::add hot path */ + [[gnu::noinline]] + auto & addChunk() + { + if (size_ >= std::numeric_limits<uint32_t>::max() - ChunkSize) + abort(); + chunks.emplace_back(); + chunks.back().reserve(ChunkSize); + return chunks.back(); + } + +public: + ChunkedVector(uint32_t reserve) + { + chunks.reserve(reserve); + addChunk(); + } + + uint32_t size() const { return size_; } + + std::pair<T &, uint32_t> add(T value) + { + const auto idx = size_++; + auto & chunk = [&] () -> auto & { + if (auto & back = chunks.back(); back.size() < ChunkSize) + return back; + return addChunk(); + }(); + auto & result = chunk.emplace_back(std::move(value)); + return {result, idx}; + } + + const T & operator[](uint32_t idx) const + { + return chunks[idx / ChunkSize][idx % ChunkSize]; + } + + template<typename Fn> + void forEach(Fn fn) const + { + for (const auto & c : chunks) + for (const auto & e : c) + fn(e); + } +}; +} diff --git a/src/libutil/error.cc b/src/libutil/error.cc index dcd2f82a5..9172f67a6 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -9,10 +9,9 @@ namespace nix { const std::string nativeSystem = SYSTEM; -BaseError & BaseError::addTrace(std::optional<ErrPos> e, hintformat hint) +void BaseError::addTrace(std::optional<ErrPos> e, hintformat hint) { err.traces.push_front(Trace { .pos = e, .hint = hint }); - return *this; } // c++ std::exception descendants must have a 'const char* what()' function. @@ -22,12 +21,9 @@ const std::string & BaseError::calcWhat() const if (what_.has_value()) return *what_; else { - err.name = sname(); - std::ostringstream oss; showErrorInfo(oss, err, loggerSettings.showTrace); what_ = oss.str(); - return *what_; } } @@ -282,6 +278,13 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s } } + auto suggestions = einfo.suggestions.trim(); + if (! suggestions.suggestions.empty()){ + oss << "Did you mean " << + suggestions.trim() << + "?" << std::endl; + } + // traces if (showTrace && !einfo.traces.empty()) { for (auto iter = einfo.traces.rbegin(); iter != einfo.traces.rend(); ++iter) { diff --git a/src/libutil/error.hh b/src/libutil/error.hh index d55e1d701..3d1479c54 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -1,5 +1,6 @@ #pragma once +#include "suggestions.hh" #include "ref.hh" #include "types.hh" #include "fmt.hh" @@ -53,6 +54,7 @@ typedef enum { lvlVomit } Verbosity; +/* adjust Pos::origin bit width when adding stuff here */ typedef enum { foFile, foStdin, @@ -85,11 +87,7 @@ struct ErrPos { origin = pos.origin; line = pos.line; column = pos.column; - // is file symbol null? - if (pos.file.set()) - file = pos.file; - else - file = ""; + file = pos.file; return *this; } @@ -100,6 +98,15 @@ struct ErrPos { } }; +std::optional<LinesOfCode> getCodeLines(const ErrPos & errPos); + +void printCodeLines(std::ostream & out, + const std::string & prefix, + const ErrPos & errPos, + const LinesOfCode & loc); + +void printAtPos(const ErrPos & pos, std::ostream & out); + struct Trace { std::optional<ErrPos> pos; hintformat hint; @@ -107,11 +114,12 @@ struct Trace { struct ErrorInfo { Verbosity level; - std::string name; // FIXME: rename hintformat msg; std::optional<ErrPos> errPos; std::list<Trace> traces; + Suggestions suggestions; + static std::optional<std::string> programName; }; @@ -141,6 +149,11 @@ public: : err { .level = lvlError, .msg = hintfmt(fs, args...) 
} { } + template<typename... Args> + BaseError(const Suggestions & sug, const Args & ... args) + : err { .level = lvlError, .msg = hintfmt(args...), .suggestions = sug } + { } + BaseError(hintformat hint) : err { .level = lvlError, .msg = hint } { } @@ -153,8 +166,6 @@ public: : err(e) { } - virtual const char* sname() const { return "BaseError"; } - #ifdef EXCEPTION_NEEDS_THROW_SPEC ~BaseError() throw () { }; const char * what() const throw () { return calcWhat().c_str(); } @@ -166,12 +177,12 @@ public: const ErrorInfo & info() const { calcWhat(); return err; } template<typename... Args> - BaseError & addTrace(std::optional<ErrPos> e, const std::string & fs, const Args & ... args) + void addTrace(std::optional<ErrPos> e, const std::string & fs, const Args & ... args) { - return addTrace(e, hintfmt(fs, args...)); + addTrace(e, hintfmt(fs, args...)); } - BaseError & addTrace(std::optional<ErrPos> e, hintformat hint); + void addTrace(std::optional<ErrPos> e, hintformat hint); bool hasTrace() const { return !err.traces.empty(); } }; @@ -181,7 +192,6 @@ public: { \ public: \ using superClass::superClass; \ - virtual const char* sname() const override { return #newClass; } \ } MakeError(Error, BaseError); @@ -194,15 +204,19 @@ public: int errNo; template<typename... Args> - SysError(const Args & ... args) + SysError(int errNo_, const Args & ... args) : Error("") { - errNo = errno; + errNo = errNo_; auto hf = hintfmt(args...); err.msg = hintfmt("%1%: %2%", normaltxt(hf.str()), strerror(errNo)); } - virtual const char* sname() const override { return "SysError"; } + template<typename... Args> + SysError(const Args & ... args) + : SysError(errno, args ...) + { + } }; } diff --git a/src/libutil/experimental-features.cc b/src/libutil/experimental-features.cc index 52ba549a3..30d071408 100644 --- a/src/libutil/experimental-features.cc +++ b/src/libutil/experimental-features.cc @@ -7,10 +7,13 @@ namespace nix { std::map<ExperimentalFeature, std::string> stringifiedXpFeatures = { { Xp::CaDerivations, "ca-derivations" }, + { Xp::ImpureDerivations, "impure-derivations" }, { Xp::Flakes, "flakes" }, { Xp::NixCommand, "nix-command" }, { Xp::RecursiveNix, "recursive-nix" }, { Xp::NoUrlLiterals, "no-url-literals" }, + { Xp::FetchClosure, "fetch-closure" }, + { Xp::ReplFlake, "repl-flake" }, { Xp::AutoAllocateUids, "auto-allocate-uids" }, { Xp::SystemdCgroup, "systemd-cgroup" }, }; @@ -35,7 +38,9 @@ const std::optional<ExperimentalFeature> parseExperimentalFeature(const std::str std::string_view showExperimentalFeature(const ExperimentalFeature feature) { - return stringifiedXpFeatures.at(feature); + const auto ret = get(stringifiedXpFeatures, feature); + assert(ret); + return *ret; } std::set<ExperimentalFeature> parseFeatures(const std::set<std::string> & rawFeatures) @@ -58,4 +63,20 @@ std::ostream & operator <<(std::ostream & str, const ExperimentalFeature & featu return str << showExperimentalFeature(feature); } +void to_json(nlohmann::json & j, const ExperimentalFeature & feature) +{ + j = showExperimentalFeature(feature); +} + +void from_json(const nlohmann::json & j, ExperimentalFeature & feature) +{ + const std::string input = j; + const auto parsed = parseExperimentalFeature(input); + + if (parsed.has_value()) + feature = *parsed; + else + throw Error("Unknown experimental feature '%s' in JSON input", input); +} + } diff --git a/src/libutil/experimental-features.hh b/src/libutil/experimental-features.hh index 00df8b165..c749d4767 100644 --- a/src/libutil/experimental-features.hh +++ 
b/src/libutil/experimental-features.hh @@ -16,10 +16,13 @@ namespace nix { enum struct ExperimentalFeature { CaDerivations, + ImpureDerivations, Flakes, NixCommand, RecursiveNix, NoUrlLiterals, + FetchClosure, + ReplFlake, AutoAllocateUids, SystemdCgroup, }; @@ -49,10 +52,13 @@ public: ExperimentalFeature missingFeature; MissingExperimentalFeature(ExperimentalFeature); - virtual const char * sname() const override - { - return "MissingExperimentalFeature"; - } }; +/** + * Semi-magic conversion to and from json. + * See the nlohmann/json readme for more details. + */ +void to_json(nlohmann::json &, const ExperimentalFeature &); +void from_json(const nlohmann::json &, ExperimentalFeature &); + } diff --git a/src/libutil/filesystem.cc b/src/libutil/filesystem.cc new file mode 100644 index 000000000..403389e60 --- /dev/null +++ b/src/libutil/filesystem.cc @@ -0,0 +1,172 @@ +#include <sys/time.h> +#include <filesystem> + +#include "finally.hh" +#include "util.hh" +#include "types.hh" + +namespace fs = std::filesystem; + +namespace nix { + +static Path tempName(Path tmpRoot, const Path & prefix, bool includePid, + int & counter) +{ + tmpRoot = canonPath(tmpRoot.empty() ? getEnv("TMPDIR").value_or("/tmp") : tmpRoot, true); + if (includePid) + return (format("%1%/%2%-%3%-%4%") % tmpRoot % prefix % getpid() % counter++).str(); + else + return (format("%1%/%2%-%3%") % tmpRoot % prefix % counter++).str(); +} + +Path createTempDir(const Path & tmpRoot, const Path & prefix, + bool includePid, bool useGlobalCounter, mode_t mode) +{ + static int globalCounter = 0; + int localCounter = 0; + int & counter(useGlobalCounter ? globalCounter : localCounter); + + while (1) { + checkInterrupt(); + Path tmpDir = tempName(tmpRoot, prefix, includePid, counter); + if (mkdir(tmpDir.c_str(), mode) == 0) { +#if __FreeBSD__ + /* Explicitly set the group of the directory. This is to + work around around problems caused by BSD's group + ownership semantics (directories inherit the group of + the parent). For instance, the group of /tmp on + FreeBSD is "wheel", so all directories created in /tmp + will be owned by "wheel"; but if the user is not in + "wheel", then "tar" will fail to unpack archives that + have the setgid bit set on directories. */ + if (chown(tmpDir.c_str(), (uid_t) -1, getegid()) != 0) + throw SysError("setting group of directory '%1%'", tmpDir); +#endif + return tmpDir; + } + if (errno != EEXIST) + throw SysError("creating directory '%1%'", tmpDir); + } +} + + +std::pair<AutoCloseFD, Path> createTempFile(const Path & prefix) +{ + Path tmpl(getEnv("TMPDIR").value_or("/tmp") + "/" + prefix + ".XXXXXX"); + // Strictly speaking, this is UB, but who cares... + // FIXME: use O_TMPFILE. 
+ AutoCloseFD fd(mkstemp((char *) tmpl.c_str())); + if (!fd) + throw SysError("creating temporary file '%s'", tmpl); + closeOnExec(fd.get()); + return {std::move(fd), tmpl}; +} + +void createSymlink(const Path & target, const Path & link, + std::optional<time_t> mtime) +{ + if (symlink(target.c_str(), link.c_str())) + throw SysError("creating symlink from '%1%' to '%2%'", link, target); + if (mtime) { + struct timeval times[2]; + times[0].tv_sec = *mtime; + times[0].tv_usec = 0; + times[1].tv_sec = *mtime; + times[1].tv_usec = 0; + if (lutimes(link.c_str(), times)) + throw SysError("setting time of symlink '%s'", link); + } +} + +void replaceSymlink(const Path & target, const Path & link, + std::optional<time_t> mtime) +{ + for (unsigned int n = 0; true; n++) { + Path tmp = canonPath(fmt("%s/.%d_%s", dirOf(link), n, baseNameOf(link))); + + try { + createSymlink(target, tmp, mtime); + } catch (SysError & e) { + if (e.errNo == EEXIST) continue; + throw; + } + + renameFile(tmp, link); + + break; + } +} + +void setWriteTime(const fs::path & p, const struct stat & st) +{ + struct timeval times[2]; + times[0] = { + .tv_sec = st.st_atime, + .tv_usec = 0, + }; + times[1] = { + .tv_sec = st.st_mtime, + .tv_usec = 0, + }; + if (lutimes(p.c_str(), times) != 0) + throw SysError("changing modification time of '%s'", p); +} + +void copy(const fs::directory_entry & from, const fs::path & to, bool andDelete) +{ + // TODO: Rewrite the `is_*` to use `symlink_status()` + auto statOfFrom = lstat(from.path().c_str()); + auto fromStatus = from.symlink_status(); + + // Mark the directory as writable so that we can delete its children + if (andDelete && fs::is_directory(fromStatus)) { + fs::permissions(from.path(), fs::perms::owner_write, fs::perm_options::add | fs::perm_options::nofollow); + } + + + if (fs::is_symlink(fromStatus) || fs::is_regular_file(fromStatus)) { + fs::copy(from.path(), to, fs::copy_options::copy_symlinks | fs::copy_options::overwrite_existing); + } else if (fs::is_directory(fromStatus)) { + fs::create_directory(to); + for (auto & entry : fs::directory_iterator(from.path())) { + copy(entry, to / entry.path().filename(), andDelete); + } + } else { + throw Error("file '%s' has an unsupported type", from.path()); + } + + setWriteTime(to, statOfFrom); + if (andDelete) { + if (!fs::is_symlink(fromStatus)) + fs::permissions(from.path(), fs::perms::owner_write, fs::perm_options::add | fs::perm_options::nofollow); + fs::remove(from.path()); + } +} + +void renameFile(const Path & oldName, const Path & newName) +{ + fs::rename(oldName, newName); +} + +void moveFile(const Path & oldName, const Path & newName) +{ + try { + renameFile(oldName, newName); + } catch (fs::filesystem_error & e) { + auto oldPath = fs::path(oldName); + auto newPath = fs::path(newName); + // For the move to be as atomic as possible, copy to a temporary + // directory + fs::path temp = createTempDir(newPath.parent_path(), "rename-tmp"); + Finally removeTemp = [&]() { fs::remove(temp); }; + auto tempCopyTarget = temp / "copy-target"; + if (e.code().value() == EXDEV) { + fs::remove(newPath); + warn("Can’t rename %s as %s, copying instead", oldName, newName); + copy(fs::directory_entry(oldPath), tempCopyTarget, true); + renameFile(tempCopyTarget, newPath); + } + } +} + +} diff --git a/src/libutil/finally.hh b/src/libutil/finally.hh index 7760cfe9a..dee2e8d2f 100644 --- a/src/libutil/finally.hh +++ b/src/libutil/finally.hh @@ -1,14 +1,13 @@ #pragma once -#include <functional> - /* A trivial class to run a function at the end of a 
scope. */ +template<typename Fn> class Finally { private: - std::function<void()> fun; + Fn fun; public: - Finally(std::function<void()> fun) : fun(fun) { } + Finally(Fn fun) : fun(std::move(fun)) { } ~Finally() { fun(); } }; diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh index 0821b3b74..7664e5c04 100644 --- a/src/libutil/fmt.hh +++ b/src/libutil/fmt.hh @@ -2,7 +2,6 @@ #include <boost/format.hpp> #include <string> -#include <regex> #include "ansicolor.hh" @@ -155,15 +154,4 @@ inline hintformat hintfmt(std::string plain_string) return hintfmt("%s", normaltxt(plain_string)); } -/* Highlight all the given matches in the given string `s` by wrapping - them between `prefix` and `postfix`. - - If some matches overlap, then their union will be wrapped rather - than the individual matches. */ -std::string hiliteMatches( - std::string_view s, - std::vector<std::smatch> matches, - std::string_view prefix, - std::string_view postfix); - } diff --git a/src/libutil/git.cc b/src/libutil/git.cc new file mode 100644 index 000000000..f35c2fdb7 --- /dev/null +++ b/src/libutil/git.cc @@ -0,0 +1,25 @@ +#include "git.hh" + +#include <regex> + +namespace nix { +namespace git { + +std::optional<LsRemoteRefLine> parseLsRemoteLine(std::string_view line) +{ + const static std::regex line_regex("^(ref: *)?([^\\s]+)(?:\\t+(.*))?$"); + std::match_results<std::string_view::const_iterator> match; + if (!std::regex_match(line.cbegin(), line.cend(), match, line_regex)) + return std::nullopt; + + return LsRemoteRefLine { + .kind = match[1].length() == 0 + ? LsRemoteRefLine::Kind::Object + : LsRemoteRefLine::Kind::Symbolic, + .target = match[2], + .reference = match[3].length() == 0 ? std::nullopt : std::optional<std::string>{ match[3] } + }; +} + +} +} diff --git a/src/libutil/git.hh b/src/libutil/git.hh new file mode 100644 index 000000000..cb13ef0e5 --- /dev/null +++ b/src/libutil/git.hh @@ -0,0 +1,40 @@ +#pragma once + +#include <string> +#include <string_view> +#include <optional> + +namespace nix { + +namespace git { + +// A line from the output of `git ls-remote --symref`. +// +// These can be of two kinds: +// +// - Symbolic references of the form +// +// ref: {target} {reference} +// +// where {target} is itself a reference and {reference} is optional +// +// - Object references of the form +// +// {target} {reference} +// +// where {target} is a commit id and {reference} is mandatory +struct LsRemoteRefLine { + enum struct Kind { + Symbolic, + Object + }; + Kind kind; + std::string target; + std::optional<std::string> reference; +}; + +std::optional<LsRemoteRefLine> parseLsRemoteLine(std::string_view line); + +} + +} diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index a4d632161..d2fd0c15a 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -155,7 +155,7 @@ static std::pair<std::optional<HashType>, bool> getParsedTypeAndSRI(std::string_ { bool isSRI = false; - // Parse the has type before the separater, if there was one. + // Parse the hash type before the separator, if there was one. 
std::optional<HashType> optParsedType; { auto hashRaw = splitPrefixTo(rest, ':'); diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh index 56b5938b3..00f70a572 100644 --- a/src/libutil/hash.hh +++ b/src/libutil/hash.hh @@ -93,13 +93,11 @@ public: std::string gitRev() const { - assert(type == htSHA1); return to_string(Base16, false); } std::string gitShortRev() const { - assert(type == htSHA1); return std::string(to_string(Base16, false), 0, 7); } diff --git a/src/libutil/fmt.cc b/src/libutil/hilite.cc index 3dd93d73e..e5088230d 100644 --- a/src/libutil/fmt.cc +++ b/src/libutil/hilite.cc @@ -1,6 +1,4 @@ -#include "fmt.hh" - -#include <regex> +#include "hilite.hh" namespace nix { @@ -10,9 +8,9 @@ std::string hiliteMatches( std::string_view prefix, std::string_view postfix) { - // Avoid copy on zero matches + // Avoid extra work on zero matches if (matches.size() == 0) - return (std::string) s; + return std::string(s); std::sort(matches.begin(), matches.end(), [](const auto & a, const auto & b) { return a.position() < b.position(); diff --git a/src/libutil/hilite.hh b/src/libutil/hilite.hh new file mode 100644 index 000000000..f8bdbfc55 --- /dev/null +++ b/src/libutil/hilite.hh @@ -0,0 +1,20 @@ +#pragma once + +#include <regex> +#include <vector> +#include <string> + +namespace nix { + +/* Highlight all the given matches in the given string `s` by wrapping + them between `prefix` and `postfix`. + + If some matches overlap, then their union will be wrapped rather + than the individual matches. */ +std::string hiliteMatches( + std::string_view s, + std::vector<std::smatch> matches, + std::string_view prefix, + std::string_view postfix); + +} diff --git a/src/libutil/json-utils.hh b/src/libutil/json-utils.hh new file mode 100644 index 000000000..b8a031227 --- /dev/null +++ b/src/libutil/json-utils.hh @@ -0,0 +1,21 @@ +#pragma once + +#include <nlohmann/json.hpp> + +namespace nix { + +const nlohmann::json * get(const nlohmann::json & map, const std::string & key) +{ + auto i = map.find(key); + if (i == map.end()) return nullptr; + return &*i; +} + +nlohmann::json * get(nlohmann::json & map, const std::string & key) +{ + auto i = map.find(key); + if (i == map.end()) return nullptr; + return &*i; +} + +} diff --git a/src/libutil/json.cc b/src/libutil/json.cc index 3a981376f..2f9e97ff5 100644 --- a/src/libutil/json.cc +++ b/src/libutil/json.cc @@ -1,11 +1,13 @@ #include "json.hh" #include <iomanip> +#include <cstdint> #include <cstring> namespace nix { -void toJSON(std::ostream & str, const char * start, const char * end) +template<> +void toJSON<std::string_view>(std::ostream & str, const std::string_view & s) { constexpr size_t BUF_SIZE = 4096; char buf[BUF_SIZE + 7]; // BUF_SIZE + largest single sequence of puts @@ -20,7 +22,7 @@ void toJSON(std::ostream & str, const char * start, const char * end) }; put('"'); - for (auto i = start; i != end; i++) { + for (auto i = s.begin(); i != s.end(); i++) { if (bufPos >= BUF_SIZE) flush(); if (*i == '\"' || *i == '\\') { put('\\'); put(*i); } else if (*i == '\n') { put('\\'); put('n'); } @@ -43,7 +45,7 @@ void toJSON(std::ostream & str, const char * start, const char * end) void toJSON(std::ostream & str, const char * s) { - if (!s) str << "null"; else toJSON(str, s, s + strlen(s)); + if (!s) str << "null"; else toJSON(str, std::string_view(s)); } template<> void toJSON<int>(std::ostream & str, const int & n) { str << n; } @@ -54,11 +56,7 @@ template<> void toJSON<long long>(std::ostream & str, const long long & n) { str template<> void toJSON<unsigned 
long long>(std::ostream & str, const unsigned long long & n) { str << n; } template<> void toJSON<float>(std::ostream & str, const float & n) { str << n; } template<> void toJSON<double>(std::ostream & str, const double & n) { str << n; } - -template<> void toJSON<std::string>(std::ostream & str, const std::string & s) -{ - toJSON(str, s.c_str(), s.c_str() + s.size()); -} +template<> void toJSON<std::string>(std::ostream & str, const std::string & s) { toJSON(str, (std::string_view) s); } template<> void toJSON<bool>(std::ostream & str, const bool & b) { @@ -153,7 +151,7 @@ JSONObject::~JSONObject() } } -void JSONObject::attr(const std::string & s) +void JSONObject::attr(std::string_view s) { comma(); toJSON(state->str, s); @@ -161,19 +159,19 @@ void JSONObject::attr(const std::string & s) if (state->indent) state->str << ' '; } -JSONList JSONObject::list(const std::string & name) +JSONList JSONObject::list(std::string_view name) { attr(name); return JSONList(state); } -JSONObject JSONObject::object(const std::string & name) +JSONObject JSONObject::object(std::string_view name) { attr(name); return JSONObject(state); } -JSONPlaceholder JSONObject::placeholder(const std::string & name) +JSONPlaceholder JSONObject::placeholder(std::string_view name) { attr(name); return JSONPlaceholder(state); @@ -195,7 +193,11 @@ JSONObject JSONPlaceholder::object() JSONPlaceholder::~JSONPlaceholder() { - assert(!first || std::uncaught_exceptions()); + if (first) { + assert(std::uncaught_exceptions()); + if (state->stack != 0) + write(nullptr); + } } } diff --git a/src/libutil/json.hh b/src/libutil/json.hh index 83213ca66..3790b1a2e 100644 --- a/src/libutil/json.hh +++ b/src/libutil/json.hh @@ -6,7 +6,6 @@ namespace nix { -void toJSON(std::ostream & str, const char * start, const char * end); void toJSON(std::ostream & str, const char * s); template<typename T> @@ -107,7 +106,7 @@ private: open(); } - void attr(const std::string & s); + void attr(std::string_view s); public: @@ -128,18 +127,18 @@ public: ~JSONObject(); template<typename T> - JSONObject & attr(const std::string & name, const T & v) + JSONObject & attr(std::string_view name, const T & v) { attr(name); toJSON(state->str, v); return *this; } - JSONList list(const std::string & name); + JSONList list(std::string_view name); - JSONObject object(const std::string & name); + JSONObject object(std::string_view name); - JSONPlaceholder placeholder(const std::string & name); + JSONPlaceholder placeholder(std::string_view name); }; class JSONPlaceholder : JSONWriter diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 74ee2f063..cb2b15b41 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -266,51 +266,63 @@ static Logger::Fields getFields(nlohmann::json & json) return fields; } -bool handleJSONLogMessage(const std::string & msg, - const Activity & act, std::map<ActivityId, Activity> & activities, bool trusted) +std::optional<nlohmann::json> parseJSONMessage(const std::string & msg) { - if (!hasPrefix(msg, "@nix ")) return false; - + if (!hasPrefix(msg, "@nix ")) return std::nullopt; try { - auto json = nlohmann::json::parse(std::string(msg, 5)); - - std::string action = json["action"]; - - if (action == "start") { - auto type = (ActivityType) json["type"]; - if (trusted || type == actFileTransfer) - activities.emplace(std::piecewise_construct, - std::forward_as_tuple(json["id"]), - std::forward_as_tuple(*logger, (Verbosity) json["level"], type, - json["text"], getFields(json["fields"]), act.id)); - } + return 
nlohmann::json::parse(std::string(msg, 5)); + } catch (std::exception & e) { + printError("bad JSON log message from builder: %s", e.what()); + } + return std::nullopt; +} - else if (action == "stop") - activities.erase((ActivityId) json["id"]); +bool handleJSONLogMessage(nlohmann::json & json, + const Activity & act, std::map<ActivityId, Activity> & activities, + bool trusted) +{ + std::string action = json["action"]; + + if (action == "start") { + auto type = (ActivityType) json["type"]; + if (trusted || type == actFileTransfer) + activities.emplace(std::piecewise_construct, + std::forward_as_tuple(json["id"]), + std::forward_as_tuple(*logger, (Verbosity) json["level"], type, + json["text"], getFields(json["fields"]), act.id)); + } - else if (action == "result") { - auto i = activities.find((ActivityId) json["id"]); - if (i != activities.end()) - i->second.result((ResultType) json["type"], getFields(json["fields"])); - } + else if (action == "stop") + activities.erase((ActivityId) json["id"]); - else if (action == "setPhase") { - std::string phase = json["phase"]; - act.result(resSetPhase, phase); - } + else if (action == "result") { + auto i = activities.find((ActivityId) json["id"]); + if (i != activities.end()) + i->second.result((ResultType) json["type"], getFields(json["fields"])); + } - else if (action == "msg") { - std::string msg = json["msg"]; - logger->log((Verbosity) json["level"], msg); - } + else if (action == "setPhase") { + std::string phase = json["phase"]; + act.result(resSetPhase, phase); + } - } catch (std::exception & e) { - printError("bad JSON log message from builder: %s", e.what()); + else if (action == "msg") { + std::string msg = json["msg"]; + logger->log((Verbosity) json["level"], msg); } return true; } +bool handleJSONLogMessage(const std::string & msg, + const Activity & act, std::map<ActivityId, Activity> & activities, bool trusted) +{ + auto json = parseJSONMessage(msg); + if (!json) return false; + + return handleJSONLogMessage(*json, act, activities, trusted); +} + Activity::~Activity() { try { diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index bd28036ae..d0817b4a9 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -4,6 +4,8 @@ #include "error.hh" #include "config.hh" +#include <nlohmann/json_fwd.hpp> + namespace nix { typedef enum { @@ -109,6 +111,9 @@ public: virtual std::optional<char> ask(std::string_view s) { return {}; } + + virtual void setPrintBuildLogs(bool printBuildLogs) + { } }; ActivityId getCurActivity(); @@ -166,6 +171,12 @@ Logger * makeSimpleLogger(bool printBuildLogs = true); Logger * makeJSONLogger(Logger & prevLogger); +std::optional<nlohmann::json> parseJSONMessage(const std::string & msg); + +bool handleJSONLogMessage(nlohmann::json & json, + const Activity & act, std::map<ActivityId, Activity> & activities, + bool trusted); + bool handleJSONLogMessage(const std::string & msg, const Activity & act, std::map<ActivityId, Activity> & activities, bool trusted); diff --git a/src/libutil/ref.hh b/src/libutil/ref.hh index 347b81f73..bf26321db 100644 --- a/src/libutil/ref.hh +++ b/src/libutil/ref.hh @@ -7,7 +7,7 @@ namespace nix { /* A simple non-nullable reference-counted pointer. Actually a wrapper - around std::shared_ptr that prevents non-null constructions. */ + around std::shared_ptr that prevents null constructions. */ template<typename T> class ref { @@ -99,47 +99,4 @@ make_ref(Args&&... args) return ref<T>(p); } - -/* A non-nullable pointer. - This is similar to a C++ "& reference", but mutable. 
- This is similar to ref<T> but backed by a regular pointer instead of a smart pointer. - */ -template<typename T> -class ptr { -private: - T * p; - -public: - ptr<T>(const ptr<T> & r) - : p(r.p) - { } - - explicit ptr<T>(T * p) - : p(p) - { - if (!p) - throw std::invalid_argument("null pointer cast to ptr"); - } - - T* operator ->() const - { - return &*p; - } - - T& operator *() const - { - return *p; - } - - bool operator == (const ptr<T> & other) const - { - return p == other.p; - } - - bool operator != (const ptr<T> & other) const - { - return p != other.p; - } -}; - } diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 6445b3f1b..2c3597775 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -48,24 +48,9 @@ FdSink::~FdSink() } -size_t threshold = 256 * 1024 * 1024; - -static void warnLargeDump() -{ - warn("dumping very large path (> 256 MiB); this may run out of memory"); -} - - void FdSink::write(std::string_view data) { written += data.size(); - static bool warned = false; - if (warn && !warned) { - if (written > threshold) { - warnLargeDump(); - warned = true; - } - } try { writeFull(fd, data); } catch (SysError & e) { @@ -357,7 +342,7 @@ Sink & operator << (Sink & sink, const Error & ex) sink << "Error" << info.level - << info.name + << "Error" // removed << info.msg.str() << 0 // FIXME: info.errPos << info.traces.size(); @@ -426,11 +411,10 @@ Error readError(Source & source) auto type = readString(source); assert(type == "Error"); auto level = (Verbosity) readInt(source); - auto name = readString(source); + auto name = readString(source); // removed auto msg = readString(source); ErrorInfo info { .level = level, - .name = name, .msg = hintformat(std::move(format("%s") % msg)), }; auto havePos = readNum<size_t>(source); @@ -449,11 +433,6 @@ Error readError(Source & source) void StringSink::operator () (std::string_view data) { - static bool warned = false; - if (!warned && s.size() > threshold) { - warnLargeDump(); - warned = true; - } s.append(data); } diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index 13da26c6a..84847835a 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -97,19 +97,17 @@ protected: struct FdSink : BufferedSink { int fd; - bool warn = false; size_t written = 0; FdSink() : fd(-1) { } FdSink(int fd) : fd(fd) { } FdSink(FdSink&&) = default; - FdSink& operator=(FdSink && s) + FdSink & operator=(FdSink && s) { flush(); fd = s.fd; s.fd = -1; - warn = s.warn; written = s.written; return *this; } diff --git a/src/libutil/suggestions.cc b/src/libutil/suggestions.cc new file mode 100644 index 000000000..9510a5f0c --- /dev/null +++ b/src/libutil/suggestions.cc @@ -0,0 +1,114 @@ +#include "suggestions.hh" +#include "ansicolor.hh" +#include "util.hh" +#include <algorithm> + +namespace nix { + +int levenshteinDistance(std::string_view first, std::string_view second) +{ + // Implementation borrowed from + // https://en.wikipedia.org/wiki/Levenshtein_distance#Iterative_with_two_matrix_rows + + int m = first.size(); + int n = second.size(); + + auto v0 = std::vector<int>(n+1); + auto v1 = std::vector<int>(n+1); + + for (auto i = 0; i <= n; i++) + v0[i] = i; + + for (auto i = 0; i < m; i++) { + v1[0] = i+1; + + for (auto j = 0; j < n; j++) { + auto deletionCost = v0[j+1] + 1; + auto insertionCost = v1[j] + 1; + auto substitutionCost = first[i] == second[j] ? 
v0[j] : v0[j] + 1; + v1[j+1] = std::min({deletionCost, insertionCost, substitutionCost}); + } + + std::swap(v0, v1); + } + + return v0[n]; +} + +Suggestions Suggestions::bestMatches ( + std::set<std::string> allMatches, + std::string query) +{ + std::set<Suggestion> res; + for (const auto & possibleMatch : allMatches) { + res.insert(Suggestion { + .distance = levenshteinDistance(query, possibleMatch), + .suggestion = possibleMatch, + }); + } + return Suggestions { res }; +} + +Suggestions Suggestions::trim(int limit, int maxDistance) const +{ + std::set<Suggestion> res; + + int count = 0; + + for (auto & elt : suggestions) { + if (count >= limit || elt.distance > maxDistance) + break; + count++; + res.insert(elt); + } + + return Suggestions{res}; +} + +std::string Suggestion::to_string() const +{ + return ANSI_WARNING + filterANSIEscapes(suggestion) + ANSI_NORMAL; +} + +std::string Suggestions::to_string() const +{ + switch (suggestions.size()) { + case 0: + return ""; + case 1: + return suggestions.begin()->to_string(); + default: { + std::string res = "one of "; + auto iter = suggestions.begin(); + res += iter->to_string(); // Iter can’t be end() because the container isn’t null + iter++; + auto last = suggestions.end(); last--; + for ( ; iter != suggestions.end() ; iter++) { + res += (iter == last) ? " or " : ", "; + res += iter->to_string(); + } + return res; + } + } +} + +Suggestions & Suggestions::operator+=(const Suggestions & other) +{ + suggestions.insert( + other.suggestions.begin(), + other.suggestions.end() + ); + return *this; +} + +std::ostream & operator<<(std::ostream & str, const Suggestion & suggestion) +{ + return str << suggestion.to_string(); +} + +std::ostream & operator<<(std::ostream & str, const Suggestions & suggestions) +{ + return str << suggestions.to_string(); +} + +} diff --git a/src/libutil/suggestions.hh b/src/libutil/suggestions.hh new file mode 100644 index 000000000..d54dd8e31 --- /dev/null +++ b/src/libutil/suggestions.hh @@ -0,0 +1,102 @@ +#pragma once + +#include "comparator.hh" +#include "types.hh" +#include <set> + +namespace nix { + +int levenshteinDistance(std::string_view first, std::string_view second); + +/** + * A potential suggestion for the cli interface. 
+ */ +class Suggestion { +public: + int distance; // The smaller the better + std::string suggestion; + + std::string to_string() const; + + GENERATE_CMP(Suggestion, me->distance, me->suggestion) +}; + +class Suggestions { +public: + std::set<Suggestion> suggestions; + + std::string to_string() const; + + Suggestions trim( + int limit = 5, + int maxDistance = 2 + ) const; + + static Suggestions bestMatches ( + std::set<std::string> allMatches, + std::string query + ); + + Suggestions& operator+=(const Suggestions & other); +}; + +std::ostream & operator<<(std::ostream & str, const Suggestion &); +std::ostream & operator<<(std::ostream & str, const Suggestions &); + +// Either a value of type `T`, or some suggestions +template<typename T> +class OrSuggestions { +public: + using Raw = std::variant<T, Suggestions>; + + Raw raw; + + T* operator ->() + { + return &**this; + } + + T& operator *() + { + return std::get<T>(raw); + } + + operator bool() const noexcept + { + return std::holds_alternative<T>(raw); + } + + OrSuggestions(T t) + : raw(t) + { + } + + OrSuggestions() + : raw(Suggestions{}) + { + } + + static OrSuggestions<T> failed(const Suggestions & s) + { + auto res = OrSuggestions<T>(); + res.raw = s; + return res; + } + + static OrSuggestions<T> failed() + { + return OrSuggestions<T>::failed(Suggestions{}); + } + + const Suggestions & getSuggestions() + { + static Suggestions noSuggestions; + if (const auto & suggestions = std::get_if<Suggestions>(&raw)) + return *suggestions; + else + return noSuggestions; + } + +}; + +} diff --git a/src/libutil/tarfile.cc b/src/libutil/tarfile.cc index 790bc943a..a7db58559 100644 --- a/src/libutil/tarfile.cc +++ b/src/libutil/tarfile.cc @@ -39,28 +39,30 @@ void TarArchive::check(int err, const std::string & reason) throw Error(reason, archive_error_string(this->archive)); } -TarArchive::TarArchive(Source & source, bool raw) - : source(&source), buffer(4096) +TarArchive::TarArchive(Source & source, bool raw) : buffer(4096) { - init(); - if (!raw) + this->archive = archive_read_new(); + this->source = &source; + + if (!raw) { + archive_read_support_filter_all(archive); archive_read_support_format_all(archive); - else + } else { + archive_read_support_filter_all(archive); archive_read_support_format_raw(archive); + archive_read_support_format_empty(archive); + } check(archive_read_open(archive, (void *)this, callback_open, callback_read, callback_close), "Failed to open archive (%s)"); } + TarArchive::TarArchive(const Path & path) { - init(); - archive_read_support_format_all(archive); - check(archive_read_open_filename(archive, path.c_str(), 16384), "failed to open archive: %s"); -} + this->archive = archive_read_new(); -void TarArchive::init() -{ - archive = archive_read_new(); archive_read_support_filter_all(archive); + archive_read_support_format_all(archive); + check(archive_read_open_filename(archive, path.c_str(), 16384), "failed to open archive: %s"); } void TarArchive::close() diff --git a/src/libutil/tarfile.hh b/src/libutil/tarfile.hh index f107a7e2e..4d9141fd4 100644 --- a/src/libutil/tarfile.hh +++ b/src/libutil/tarfile.hh @@ -17,13 +17,10 @@ struct TarArchive { // disable copy constructor TarArchive(const TarArchive &) = delete; - void init(); - void close(); ~TarArchive(); }; - void unpackTarfile(Source & source, const Path & destDir); void unpackTarfile(const Path & tarFile, const Path & destDir); diff --git a/src/libutil/tests/chunked-vector.cc b/src/libutil/tests/chunked-vector.cc new file mode 100644 index 000000000..868d11f6f --- 
/dev/null +++ b/src/libutil/tests/chunked-vector.cc @@ -0,0 +1,54 @@ +#include "chunked-vector.hh" + +#include <gtest/gtest.h> + +namespace nix { + TEST(ChunkedVector, InitEmpty) { + auto v = ChunkedVector<int, 2>(100); + ASSERT_EQ(v.size(), 0); + } + + TEST(ChunkedVector, GrowsCorrectly) { + auto v = ChunkedVector<int, 2>(100); + for (auto i = 1; i < 20; i++) { + v.add(i); + ASSERT_EQ(v.size(), i); + } + } + + TEST(ChunkedVector, AddAndGet) { + auto v = ChunkedVector<int, 2>(100); + for (auto i = 1; i < 20; i++) { + auto [i2, idx] = v.add(i); + auto & i3 = v[idx]; + ASSERT_EQ(i, i2); + ASSERT_EQ(&i2, &i3); + } + } + + TEST(ChunkedVector, ForEach) { + auto v = ChunkedVector<int, 2>(100); + for (auto i = 1; i < 20; i++) { + v.add(i); + } + int count = 0; + v.forEach([&count](int elt) { + count++; + }); + ASSERT_EQ(count, v.size()); + } + + TEST(ChunkedVector, OverflowOK) { + // Similar to the AddAndGet, but intentionally use a small + // initial ChunkedVector to force it to overflow + auto v = ChunkedVector<int, 2>(2); + for (auto i = 1; i < 20; i++) { + auto [i2, idx] = v.add(i); + auto & i3 = v[idx]; + ASSERT_EQ(i, i2); + ASSERT_EQ(&i2, &i3); + } + } + +} + diff --git a/src/libutil/tests/git.cc b/src/libutil/tests/git.cc new file mode 100644 index 000000000..5b5715fc2 --- /dev/null +++ b/src/libutil/tests/git.cc @@ -0,0 +1,33 @@ +#include "git.hh" +#include <gtest/gtest.h> + +namespace nix { + + TEST(GitLsRemote, parseSymrefLineWithReference) { + auto line = "ref: refs/head/main HEAD"; + auto res = git::parseLsRemoteLine(line); + ASSERT_TRUE(res.has_value()); + ASSERT_EQ(res->kind, git::LsRemoteRefLine::Kind::Symbolic); + ASSERT_EQ(res->target, "refs/head/main"); + ASSERT_EQ(res->reference, "HEAD"); + } + + TEST(GitLsRemote, parseSymrefLineWithNoReference) { + auto line = "ref: refs/head/main"; + auto res = git::parseLsRemoteLine(line); + ASSERT_TRUE(res.has_value()); + ASSERT_EQ(res->kind, git::LsRemoteRefLine::Kind::Symbolic); + ASSERT_EQ(res->target, "refs/head/main"); + ASSERT_EQ(res->reference, std::nullopt); + } + + TEST(GitLsRemote, parseObjectRefLine) { + auto line = "abc123 refs/head/main"; + auto res = git::parseLsRemoteLine(line); + ASSERT_TRUE(res.has_value()); + ASSERT_EQ(res->kind, git::LsRemoteRefLine::Kind::Object); + ASSERT_EQ(res->target, "abc123"); + ASSERT_EQ(res->reference, "refs/head/main"); + } +} + diff --git a/src/libutil/tests/fmt.cc b/src/libutil/tests/hilite.cc index 33772162c..1ff5980d5 100644 --- a/src/libutil/tests/fmt.cc +++ b/src/libutil/tests/hilite.cc @@ -1,9 +1,7 @@ -#include "fmt.hh" +#include "hilite.hh" #include <gtest/gtest.h> -#include <regex> - namespace nix { /* ----------- tests for fmt.hh -------------------------------------------------*/ diff --git a/src/libutil/tests/json.cc b/src/libutil/tests/json.cc index dea73f53a..156286999 100644 --- a/src/libutil/tests/json.cc +++ b/src/libutil/tests/json.cc @@ -102,8 +102,8 @@ namespace nix { TEST(toJSON, substringEscape) { std::stringstream out; - const char *s = "foo\t"; - toJSON(out, s+3, s + strlen(s)); + std::string_view s = "foo\t"; + toJSON(out, s.substr(3)); ASSERT_EQ(out.str(), "\"\\t\""); } diff --git a/src/libutil/tests/logging.cc b/src/libutil/tests/logging.cc index cef3bd481..2ffdc2e9b 100644 --- a/src/libutil/tests/logging.cc +++ b/src/libutil/tests/logging.cc @@ -359,7 +359,7 @@ namespace nix { // constructing without access violation. ErrPos ep(invalid); - + // assignment without access violation. 
ep = invalid; diff --git a/src/libutil/tests/suggestions.cc b/src/libutil/tests/suggestions.cc new file mode 100644 index 000000000..279994abc --- /dev/null +++ b/src/libutil/tests/suggestions.cc @@ -0,0 +1,43 @@ +#include "suggestions.hh" +#include <gtest/gtest.h> + +namespace nix { + + struct LevenshteinDistanceParam { + std::string s1, s2; + int distance; + }; + + class LevenshteinDistanceTest : + public testing::TestWithParam<LevenshteinDistanceParam> { + }; + + TEST_P(LevenshteinDistanceTest, CorrectlyComputed) { + auto params = GetParam(); + + ASSERT_EQ(levenshteinDistance(params.s1, params.s2), params.distance); + ASSERT_EQ(levenshteinDistance(params.s2, params.s1), params.distance); + } + + INSTANTIATE_TEST_SUITE_P(LevenshteinDistance, LevenshteinDistanceTest, + testing::Values( + LevenshteinDistanceParam{"foo", "foo", 0}, + LevenshteinDistanceParam{"foo", "", 3}, + LevenshteinDistanceParam{"", "", 0}, + LevenshteinDistanceParam{"foo", "fo", 1}, + LevenshteinDistanceParam{"foo", "oo", 1}, + LevenshteinDistanceParam{"foo", "fao", 1}, + LevenshteinDistanceParam{"foo", "abc", 3} + ) + ); + + TEST(Suggestions, Trim) { + auto suggestions = Suggestions::bestMatches({"foooo", "bar", "fo", "gao"}, "foo"); + auto onlyOne = suggestions.trim(1); + ASSERT_EQ(onlyOne.suggestions.size(), 1); + ASSERT_TRUE(onlyOne.suggestions.begin()->suggestion == "fo"); + + auto closest = suggestions.trim(999, 2); + ASSERT_EQ(closest.suggestions.size(), 3); + } +} diff --git a/src/libutil/tests/tests.cc b/src/libutil/tests/tests.cc index 92972ed14..6e325db98 100644 --- a/src/libutil/tests/tests.cc +++ b/src/libutil/tests/tests.cc @@ -548,7 +548,7 @@ namespace nix { TEST(get, emptyContainer) { StringMap s = { }; - auto expected = std::nullopt; + auto expected = nullptr; ASSERT_EQ(get(s, "one"), expected); } @@ -559,7 +559,23 @@ namespace nix { s["two"] = "er"; auto expected = "yi"; - ASSERT_EQ(get(s, "one"), expected); + ASSERT_EQ(*get(s, "one"), expected); + } + + TEST(getOr, emptyContainer) { + StringMap s = { }; + auto expected = "yi"; + + ASSERT_EQ(getOr(s, "one", "yi"), expected); + } + + TEST(getOr, getFromContainer) { + StringMap s; + s["one"] = "yi"; + s["two"] = "er"; + auto expected = "yi"; + + ASSERT_EQ(getOr(s, "one", "nope"), expected); } /* ---------------------------------------------------------------------------- diff --git a/src/libutil/tests/url.cc b/src/libutil/tests/url.cc index f20e2dc41..c3b233797 100644 --- a/src/libutil/tests/url.cc +++ b/src/libutil/tests/url.cc @@ -178,7 +178,7 @@ namespace nix { } TEST(parseURL, parseFileURLWithQueryAndFragment) { - auto s = "file:///none/of/your/business"; + auto s = "file:///none/of//your/business"; auto parsed = parseURL(s); ParsedURL expected { @@ -186,7 +186,7 @@ namespace nix { .base = "", .scheme = "file", .authority = "", - .path = "/none/of/your/business", + .path = "/none/of//your/business", .query = (StringMap) { }, .fragment = "", }; diff --git a/src/libutil/types.hh b/src/libutil/types.hh index 00ba567c6..6bcbd7e1d 100644 --- a/src/libutil/types.hh +++ b/src/libutil/types.hh @@ -5,6 +5,7 @@ #include <list> #include <set> #include <string> +#include <limits> #include <map> #include <variant> #include <vector> diff --git a/src/libutil/url-parts.hh b/src/libutil/url-parts.hh index da10a6bbc..d5e6a2736 100644 --- a/src/libutil/url-parts.hh +++ b/src/libutil/url-parts.hh @@ -18,7 +18,7 @@ const static std::string userRegex = "(?:(?:" + unreservedRegex + "|" + pctEncod const static std::string authorityRegex = "(?:" + userRegex + "@)?" 
+ hostRegex + "(?::[0-9]+)?"; const static std::string pcharRegex = "(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|[:@])"; const static std::string queryRegex = "(?:" + pcharRegex + "|[/? \"])*"; -const static std::string segmentRegex = "(?:" + pcharRegex + "+)"; +const static std::string segmentRegex = "(?:" + pcharRegex + "*)"; const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)"; const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)"; diff --git a/src/libutil/url.cc b/src/libutil/url.cc index f6232d255..5b7abeb49 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -1,6 +1,7 @@ #include "url.hh" #include "url-parts.hh" #include "util.hh" +#include "split.hh" namespace nix { @@ -136,4 +137,21 @@ bool ParsedURL::operator ==(const ParsedURL & other) const && fragment == other.fragment; } +/** + * Parse a URL scheme of the form '(applicationScheme\+)?transportScheme' + * into a tuple '(applicationScheme, transportScheme)' + * + * > parseUrlScheme("http") == ParsedUrlScheme{ {}, "http"} + * > parseUrlScheme("tarball+http") == ParsedUrlScheme{ {"tarball"}, "http"} + */ +ParsedUrlScheme parseUrlScheme(std::string_view scheme) +{ + auto application = splitPrefixTo(scheme, '+'); + auto transport = scheme; + return ParsedUrlScheme { + .application = application, + .transport = transport, + }; +} + } diff --git a/src/libutil/url.hh b/src/libutil/url.hh index 6e77142e3..2a9fb34c1 100644 --- a/src/libutil/url.hh +++ b/src/libutil/url.hh @@ -27,4 +27,19 @@ std::map<std::string, std::string> decodeQuery(const std::string & query); ParsedURL parseURL(const std::string & url); +/* + * Although that’s not really standardized anywhere, a number of tools + * use a scheme of the form 'x+y' in urls, where y is the “transport layer” + * scheme, and x is the “application layer” scheme. + * + * For example git uses `git+https` to designate remotes using a Git + * protocol over http. + */ +struct ParsedUrlScheme { + std::optional<std::string_view> application; + std::string_view transport; +}; + +ParsedUrlScheme parseUrlScheme(std::string_view scheme); + } diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 88c5ae806..623b74bdd 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -29,11 +29,15 @@ #ifdef __APPLE__ #include <sys/syscall.h> +#include <mach-o/dyld.h> #endif #ifdef __linux__ #include <sys/prctl.h> #include <sys/resource.h> + +#include <mntent.h> +#include <cmath> #endif @@ -71,13 +75,11 @@ void clearEnv() unsetenv(name.first.c_str()); } -void replaceEnv(std::map<std::string, std::string> newEnv) +void replaceEnv(const std::map<std::string, std::string> & newEnv) { clearEnv(); - for (auto newEnvVar : newEnv) - { + for (auto & newEnvVar : newEnv) setenv(newEnvVar.first.c_str(), newEnvVar.second.c_str(), 1); - } } @@ -200,6 +202,17 @@ std::string_view baseNameOf(std::string_view path) } +std::string expandTilde(std::string_view path) +{ + // TODO: expand ~user ? 
+ auto tilde = path.substr(0, 2); + if (tilde == "~/" || tilde == "~") + return getHome() + std::string(path.substr(1)); + else + return std::string(path); +} + + bool isInDir(std::string_view path, std::string_view dir) { return path.substr(0, 1) == "/" @@ -215,6 +228,15 @@ bool isDirOrInDir(std::string_view path, std::string_view dir) } +struct stat stat(const Path & path) +{ + struct stat st; + if (stat(path.c_str(), &st)) + throw SysError("getting status of '%1%'", path); + return st; +} + + struct stat lstat(const Path & path) { struct stat st; @@ -331,7 +353,7 @@ void readFile(const Path & path, Sink & sink) } -void writeFile(const Path & path, std::string_view s, mode_t mode) +void writeFile(const Path & path, std::string_view s, mode_t mode, bool sync) { AutoCloseFD fd = open(path.c_str(), O_WRONLY | O_TRUNC | O_CREAT | O_CLOEXEC, mode); if (!fd) @@ -342,10 +364,16 @@ void writeFile(const Path & path, std::string_view s, mode_t mode) e.addTrace({}, "writing file '%1%'", path); throw; } + if (sync) + fd.fsync(); + // Explicitly close to make sure exceptions are propagated. + fd.close(); + if (sync) + syncParent(path); } -void writeFile(const Path & path, Source & source, mode_t mode) +void writeFile(const Path & path, Source & source, mode_t mode, bool sync) { AutoCloseFD fd = open(path.c_str(), O_WRONLY | O_TRUNC | O_CREAT | O_CLOEXEC, mode); if (!fd) @@ -364,6 +392,20 @@ void writeFile(const Path & path, Source & source, mode_t mode) e.addTrace({}, "writing file '%1%'", path); throw; } + if (sync) + fd.fsync(); + // Explicitly close to make sure exceptions are propagated. + fd.close(); + if (sync) + syncParent(path); +} + +void syncParent(const Path & path) +{ + AutoCloseFD fd = open(dirOf(path).c_str(), O_RDONLY, 0); + if (!fd) + throw SysError("opening file '%1%'", path); + fd.fsync(); } std::string readLine(int fd) @@ -406,8 +448,29 @@ static void _deletePath(int parentfd, const Path & path, uint64_t & bytesFreed) throw SysError("getting status of '%1%'", path); } - if (!S_ISDIR(st.st_mode) && st.st_nlink == 1) - bytesFreed += st.st_size; + if (!S_ISDIR(st.st_mode)) { + /* We are about to delete a file. Will it likely free space? */ + + switch (st.st_nlink) { + /* Yes: last link. */ + case 1: + bytesFreed += st.st_size; + break; + /* Maybe: yes, if 'auto-optimise-store' or manual optimisation + was performed. Instead of checking for real let's assume + it's an optimised file and space will be freed. + + In worst case we will double count on freed space for files + with exactly two hardlinks for unoptimised packages. + */ + case 2: + bytesFreed += st.st_size; + break; + /* No: 3+ links. */ + default: + break; + } + } if (S_ISDIR(st.st_mode)) { /* Make the directory accessible. */ @@ -465,61 +528,6 @@ void deletePath(const Path & path, uint64_t & bytesFreed) } -static Path tempName(Path tmpRoot, const Path & prefix, bool includePid, - int & counter) -{ - tmpRoot = canonPath(tmpRoot.empty() ? getEnv("TMPDIR").value_or("/tmp") : tmpRoot, true); - if (includePid) - return (format("%1%/%2%-%3%-%4%") % tmpRoot % prefix % getpid() % counter++).str(); - else - return (format("%1%/%2%-%3%") % tmpRoot % prefix % counter++).str(); -} - - -Path createTempDir(const Path & tmpRoot, const Path & prefix, - bool includePid, bool useGlobalCounter, mode_t mode) -{ - static int globalCounter = 0; - int localCounter = 0; - int & counter(useGlobalCounter ? 
globalCounter : localCounter); - - while (1) { - checkInterrupt(); - Path tmpDir = tempName(tmpRoot, prefix, includePid, counter); - if (mkdir(tmpDir.c_str(), mode) == 0) { -#if __FreeBSD__ - /* Explicitly set the group of the directory. This is to - work around around problems caused by BSD's group - ownership semantics (directories inherit the group of - the parent). For instance, the group of /tmp on - FreeBSD is "wheel", so all directories created in /tmp - will be owned by "wheel"; but if the user is not in - "wheel", then "tar" will fail to unpack archives that - have the setgid bit set on directories. */ - if (chown(tmpDir.c_str(), (uid_t) -1, getegid()) != 0) - throw SysError("setting group of directory '%1%'", tmpDir); -#endif - return tmpDir; - } - if (errno != EEXIST) - throw SysError("creating directory '%1%'", tmpDir); - } -} - - -std::pair<AutoCloseFD, Path> createTempFile(const Path & prefix) -{ - Path tmpl(getEnv("TMPDIR").value_or("/tmp") + "/" + prefix + ".XXXXXX"); - // Strictly speaking, this is UB, but who cares... - // FIXME: use O_TMPFILE. - AutoCloseFD fd(mkstemp((char *) tmpl.c_str())); - if (!fd) - throw SysError("creating temporary file '%s'", tmpl); - closeOnExec(fd.get()); - return {std::move(fd), tmpl}; -} - - std::string getUserName() { auto pw = getpwuid(geteuid()); @@ -534,7 +542,21 @@ Path getHome() { static Path homeDir = []() { + std::optional<std::string> unownedUserHomeDir = {}; auto homeDir = getEnv("HOME"); + if (homeDir) { + // Only use $HOME if doesn't exist or is owned by the current user. + struct stat st; + int result = stat(homeDir->c_str(), &st); + if (result != 0) { + if (errno != ENOENT) { + warn("couldn't stat $HOME ('%s') for reason other than not existing ('%d'), falling back to the one defined in the 'passwd' file", *homeDir, errno); + homeDir.reset(); + } + } else if (st.st_uid != geteuid()) { + unownedUserHomeDir.swap(homeDir); + } + } if (!homeDir) { std::vector<char> buf(16384); struct passwd pwbuf; @@ -543,6 +565,9 @@ Path getHome() || !pw || !pw->pw_dir || !pw->pw_dir[0]) throw Error("cannot determine user's home directory"); homeDir = pw->pw_dir; + if (unownedUserHomeDir.has_value() && unownedUserHomeDir != homeDir) { + warn("$HOME ('%s') is not owned by you, falling back to the one defined in the 'passwd' file ('%s')", *unownedUserHomeDir, *homeDir); + } } return *homeDir; }(); @@ -580,6 +605,27 @@ Path getDataDir() } +std::optional<Path> getSelfExe() +{ + static auto cached = []() -> std::optional<Path> + { + #if __linux__ + return readLink("/proc/self/exe"); + #elif __APPLE__ + char buf[1024]; + uint32_t size = sizeof(buf); + if (_NSGetExecutablePath(buf, &size) == 0) + return buf; + else + return std::nullopt; + #else + return std::nullopt; + #endif + }(); + return cached; +} + + Paths createDirs(const Path & path) { Paths created; @@ -603,44 +649,6 @@ Paths createDirs(const Path & path) } -void createSymlink(const Path & target, const Path & link, - std::optional<time_t> mtime) -{ - if (symlink(target.c_str(), link.c_str())) - throw SysError("creating symlink from '%1%' to '%2%'", link, target); - if (mtime) { - struct timeval times[2]; - times[0].tv_sec = *mtime; - times[0].tv_usec = 0; - times[1].tv_sec = *mtime; - times[1].tv_usec = 0; - if (lutimes(link.c_str(), times)) - throw SysError("setting time of symlink '%s'", link); - } -} - - -void replaceSymlink(const Path & target, const Path & link, - std::optional<time_t> mtime) -{ - for (unsigned int n = 0; true; n++) { - Path tmp = canonPath(fmt("%s/.%d_%s", dirOf(link), 
n, baseNameOf(link))); - - try { - createSymlink(target, tmp, mtime); - } catch (SysError & e) { - if (e.errNo == EEXIST) continue; - throw; - } - - if (rename(tmp.c_str(), link.c_str()) != 0) - throw SysError("renaming '%1%' to '%2%'", tmp, link); - - break; - } -} - - void readFull(int fd, char * buf, size_t count) { while (count) { @@ -682,7 +690,14 @@ std::string drainFD(int fd, bool block, const size_t reserveSize) void drainFD(int fd, Sink & sink, bool block) { - int saved; + // silence GCC maybe-uninitialized warning in finally + int saved = 0; + + if (!block) { + saved = fcntl(fd, F_GETFL); + if (fcntl(fd, F_SETFL, saved | O_NONBLOCK) == -1) + throw SysError("making file descriptor non-blocking"); + } Finally finally([&]() { if (!block) { @@ -691,12 +706,6 @@ void drainFD(int fd, Sink & sink, bool block) } }); - if (!block) { - saved = fcntl(fd, F_GETFL); - if (fcntl(fd, F_SETFL, saved | O_NONBLOCK) == -1) - throw SysError("making file descriptor non-blocking"); - } - std::vector<unsigned char> buf(64 * 1024); while (1) { checkInterrupt(); @@ -712,7 +721,55 @@ void drainFD(int fd, Sink & sink, bool block) } } +////////////////////////////////////////////////////////////////////// + +unsigned int getMaxCPU() +{ + #if __linux__ + try { + FILE *fp = fopen("/proc/mounts", "r"); + if (!fp) + return 0; + + Strings cgPathParts; + + struct mntent *ent; + while ((ent = getmntent(fp))) { + std::string mountType, mountPath; + mountType = ent->mnt_type; + mountPath = ent->mnt_dir; + + if (mountType == "cgroup2") { + cgPathParts.push_back(mountPath); + break; + } + } + + fclose(fp); + + if (cgPathParts.size() > 0 && pathExists("/proc/self/cgroup")) { + std::string currentCgroup = readFile("/proc/self/cgroup"); + Strings cgValues = tokenizeString<Strings>(currentCgroup, ":"); + cgPathParts.push_back(trim(cgValues.back(), "\n")); + cgPathParts.push_back("cpu.max"); + std::string fullCgPath = canonPath(concatStringsSep("/", cgPathParts)); + + if (pathExists(fullCgPath)) { + std::string cpuMax = readFile(fullCgPath); + std::vector<std::string> cpuMaxParts = tokenizeString<std::vector<std::string>>(cpuMax, " "); + std::string quota = cpuMaxParts[0]; + std::string period = trim(cpuMaxParts[1], "\n"); + + if (quota != "max") + return std::ceil(std::stoi(quota) / std::stof(period)); + } + } + } catch (Error &) { ignoreException(); } + #endif + + return 0; +} ////////////////////////////////////////////////////////////////////// @@ -804,6 +861,20 @@ void AutoCloseFD::close() } } +void AutoCloseFD::fsync() +{ + if (fd != -1) { + int result; +#if __APPLE__ + result = ::fcntl(fd, F_FULLFSYNC); +#else + result = ::fsync(fd); +#endif + if (result == -1) + throw SysError("fsync file descriptor %1%", fd); + } +} + AutoCloseFD::operator bool() const { @@ -1042,7 +1113,7 @@ std::string runProgram(Path program, bool searchPath, const Strings & args, auto res = runProgram(RunOptions {.program = program, .searchPath = searchPath, .args = args, .input = input}); if (!statusOk(res.first)) - throw ExecError(res.first, fmt("program '%1%' %2%", program, statusToString(res.first))); + throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first)); return res.second; } @@ -1170,7 +1241,7 @@ void runProgram2(const RunOptions & options) if (source) promise.get_future().get(); if (status) - throw ExecError(status, fmt("program '%1%' %2%", options.program, statusToString(status))); + throw ExecError(status, "program '%1%' %2%", options.program, statusToString(status)); } @@ -1239,9 +1310,9 @@ 
template<class C> C tokenizeString(std::string_view s, std::string_view separato { C result; auto pos = s.find_first_not_of(separators, 0); - while (pos != std::string::npos) { + while (pos != std::string_view::npos) { auto end = s.find_first_of(separators, pos + 1); - if (end == std::string::npos) end = s.size(); + if (end == std::string_view::npos) end = s.size(); result.insert(result.end(), std::string(s, pos, end - pos)); pos = s.find_first_not_of(separators, end); } @@ -1263,9 +1334,9 @@ std::string chomp(std::string_view s) std::string trim(std::string_view s, std::string_view whitespace) { auto i = s.find_first_not_of(whitespace); - if (i == std::string_view::npos) return ""; + if (i == s.npos) return ""; auto j = s.find_last_not_of(whitespace); - return std::string(s, i, j == std::string::npos ? j : j - i + 1); + return std::string(s, i, j == s.npos ? j : j - i + 1); } @@ -1412,7 +1483,7 @@ std::string filterANSIEscapes(const std::string & s, bool filterAll, unsigned in } } - else if (*i == '\r') + else if (*i == '\r' || *i == '\a') // do nothing for now i++; @@ -1451,6 +1522,7 @@ constexpr char base64Chars[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuv std::string base64Encode(std::string_view s) { std::string res; + res.reserve((s.size() + 2) / 3 * 4); int data = 0, nbits = 0; for (char c : s) { @@ -1482,6 +1554,9 @@ std::string base64Decode(std::string_view s) }(); std::string res; + // Some sequences are missing the padding consisting of up to two '='. + // vvv + res.reserve((s.size() + 2) / 4 * 3); unsigned int d = 0, bits = 0; for (char c : s) { @@ -1544,7 +1619,6 @@ std::string stripIndentation(std::string_view s) ////////////////////////////////////////////////////////////////////// - static Sync<std::pair<unsigned short, unsigned short>> windowSize{{0, 0}}; @@ -1668,7 +1742,9 @@ void setStackSize(size_t stackSize) #endif } +#if __linux__ static AutoCloseFD fdSavedMountNamespace; +#endif void saveMountNamespace() { @@ -1687,8 +1763,13 @@ void restoreMountNamespace() { #if __linux__ try { + auto savedCwd = absPath("."); + if (fdSavedMountNamespace && setns(fdSavedMountNamespace.get(), CLONE_NEWNS) == -1) throw SysError("restoring parent mount namespace"); + if (chdir(savedCwd.c_str()) == -1) { + throw SysError("restoring cwd"); + } } catch (Error & e) { debug(e.msg()); } @@ -1768,7 +1849,7 @@ AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode) if (chmod(path.c_str(), mode) == -1) throw SysError("changing permissions on '%1%'", path); - if (listen(fdSocket.get(), 5) == -1) + if (listen(fdSocket.get(), 100) == -1) throw SysError("cannot listen on socket '%1%'", path); return fdSocket; diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 20591952d..e5c678682 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -68,6 +68,9 @@ Path dirOf(const PathView path); following the final `/' (trailing slashes are removed). */ std::string_view baseNameOf(std::string_view path); +/* Perform tilde expansion on a path. */ +std::string expandTilde(std::string_view path); + /* Check whether 'path' is a descendant of 'dir'. Both paths must be canonicalized. */ bool isInDir(std::string_view path, std::string_view dir); @@ -77,6 +80,7 @@ bool isInDir(std::string_view path, std::string_view dir); bool isDirOrInDir(std::string_view path, std::string_view dir); /* Get status of `path'. */ +struct stat stat(const Path & path); struct stat lstat(const Path & path); /* Return true iff the given path exists. 
*/ @@ -111,9 +115,12 @@ std::string readFile(const Path & path); void readFile(const Path & path, Sink & sink); /* Write a string to a file. */ -void writeFile(const Path & path, std::string_view s, mode_t mode = 0666); +void writeFile(const Path & path, std::string_view s, mode_t mode = 0666, bool sync = false); + +void writeFile(const Path & path, Source & source, mode_t mode = 0666, bool sync = false); -void writeFile(const Path & path, Source & source, mode_t mode = 0666); +/* Flush a file's parent directory to disk */ +void syncParent(const Path & path); /* Read a line from a file descriptor. */ std::string readLine(int fd); @@ -145,10 +152,14 @@ std::vector<Path> getConfigDirs(); /* Return $XDG_DATA_HOME or $HOME/.local/share. */ Path getDataDir(); +/* Return the path of the current executable. */ +std::optional<Path> getSelfExe(); + /* Create a directory and all its parents, if necessary. Returns the list of created directories, in order of creation. */ Paths createDirs(const Path & path); -inline Paths createDirs(PathView path) { +inline Paths createDirs(PathView path) +{ return createDirs(Path(path)); } @@ -160,6 +171,17 @@ void createSymlink(const Path & target, const Path & link, void replaceSymlink(const Path & target, const Path & link, std::optional<time_t> mtime = {}); +void renameFile(const Path & src, const Path & dst); + +/** + * Similar to 'renameFile', but fallback to a copy+remove if `src` and `dst` + * are on a different filesystem. + * + * Beware that this might not be atomic because of the copy that happens behind + * the scenes + */ +void moveFile(const Path & src, const Path & dst); + /* Wrappers arount read()/write() that read/write exactly the requested number of bytes. */ @@ -174,6 +196,9 @@ std::string drainFD(int fd, bool block = true, const size_t reserveSize=0); void drainFD(int fd, Sink & sink, bool block = true); +/* If cgroups are active, attempt to calculate the number of CPUs available. + If cgroups are unavailable or if cpu.max is set to "max", return 0. */ +unsigned int getMaxCPU(); /* Automatic cleanup of resources. */ @@ -209,6 +234,7 @@ public: explicit operator bool() const; int release(); void close(); + void fsync(); }; @@ -539,13 +565,31 @@ std::string stripIndentation(std::string_view s); /* Get a value for the specified key from an associate container. */ template <class T> -std::optional<typename T::mapped_type> get(const T & map, const typename T::key_type & key) +const typename T::mapped_type * get(const T & map, const typename T::key_type & key) +{ + auto i = map.find(key); + if (i == map.end()) return nullptr; + return &i->second; +} + +template <class T> +typename T::mapped_type * get(T & map, const typename T::key_type & key) { auto i = map.find(key); - if (i == map.end()) return {}; - return std::optional<typename T::mapped_type>(i->second); + if (i == map.end()) return nullptr; + return &i->second; } +/* Get a value for the specified key from an associate container, or a default value if the key isn't present. */ +template <class T> +const typename T::mapped_type & getOr(T & map, + const typename T::key_type & key, + const typename T::mapped_type & defaultValue) +{ + auto i = map.find(key); + if (i == map.end()) return defaultValue; + return i->second; +} /* Remove and return the first item from a container. */ template <class T> @@ -678,4 +722,19 @@ template<class... Ts> overloaded(Ts...) 
-> overloaded<Ts...>; std::string showBytes(uint64_t bytes); +/* Provide an addition operator between strings and string_views + inexplicably omitted from the standard library. */ +inline std::string operator + (const std::string & s1, std::string_view s2) +{ + auto s = s1; + s.append(s2); + return s; +} + +inline std::string operator + (std::string && s, std::string_view s2) +{ + s.append(s2); + return std::move(s); +} + } diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 3d189f934..adcaab686 100755..100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -85,7 +85,6 @@ static void main_nix_build(int argc, char * * argv) Strings attrPaths; Strings left; RepairFlag repair = NoRepair; - Path gcRoot; BuildMode buildMode = bmNormal; bool readStdin = false; @@ -167,9 +166,6 @@ static void main_nix_build(int argc, char * * argv) else if (*arg == "--out-link" || *arg == "-o") outLink = getArg(*arg, arg, end); - else if (*arg == "--add-root") - gcRoot = getArg(*arg, arg, end); - else if (*arg == "--dry-run") dryRun = true; @@ -257,11 +253,12 @@ static void main_nix_build(int argc, char * * argv) auto autoArgs = myArgs.getAutoArgs(*state); + auto autoArgsWithInNixShell = autoArgs; if (runEnv) { - auto newArgs = state->buildBindings(autoArgs->size() + 1); + auto newArgs = state->buildBindings(autoArgsWithInNixShell->size() + 1); newArgs.alloc("inNixShell").mkBool(true); for (auto & i : *autoArgs) newArgs.insert(i); - autoArgs = newArgs.finish(); + autoArgsWithInNixShell = newArgs.finish(); } if (packages) { @@ -316,17 +313,45 @@ static void main_nix_build(int argc, char * * argv) Value vRoot; state->eval(e, vRoot); + std::function<bool(const Value & v)> takesNixShellAttr; + takesNixShellAttr = [&](const Value & v) { + if (!runEnv) { + return false; + } + bool add = false; + if (v.type() == nFunction && v.lambda.fun->hasFormals()) { + for (auto & i : v.lambda.fun->formals->formals) { + if (state->symbols[i.name] == "inNixShell") { + add = true; + break; + } + } + } + return add; + }; + for (auto & i : attrPaths) { - Value & v(*findAlongAttrPath(*state, i, *autoArgs, vRoot).first); + Value & v(*findAlongAttrPath( + *state, + i, + takesNixShellAttr(vRoot) ? *autoArgsWithInNixShell : *autoArgs, + vRoot + ).first); state->forceValue(v, [&]() { return v.determinePos(noPos); }); - getDerivations(*state, v, "", *autoArgs, drvs, false); + getDerivations( + *state, + v, + "", + takesNixShellAttr(v) ? *autoArgsWithInNixShell : *autoArgs, + drvs, + false + ); } } state->printStats(); - auto buildPaths = [&](const std::vector<StorePathWithOutputs> & paths0) { - auto paths = toDerivedPaths(paths0); + auto buildPaths = [&](const std::vector<DerivedPath> & paths) { /* Note: we do this even when !printMissing to efficiently fetch binary cache data. */ uint64_t downloadSize, narSize; @@ -346,9 +371,9 @@ static void main_nix_build(int argc, char * * argv) throw UsageError("nix-shell requires a single derivation"); auto & drvInfo = drvs.front(); - auto drv = evalStore->derivationFromPath(evalStore->parseStorePath(drvInfo.queryDrvPath())); + auto drv = evalStore->derivationFromPath(drvInfo.requireDrvPath()); - std::vector<StorePathWithOutputs> pathsToBuild; + std::vector<DerivedPath> pathsToBuild; RealisedPath::Set pathsToCopy; /* Figure out what bash shell to use. 
If $NIX_BUILD_SHELL @@ -369,8 +394,11 @@ static void main_nix_build(int argc, char * * argv) if (!drv) throw Error("the 'bashInteractive' attribute in <nixpkgs> did not evaluate to a derivation"); - auto bashDrv = store->parseStorePath(drv->queryDrvPath()); - pathsToBuild.push_back({bashDrv}); + auto bashDrv = drv->requireDrvPath(); + pathsToBuild.push_back(DerivedPath::Built { + .drvPath = bashDrv, + .outputs = {"out"}, + }); pathsToCopy.insert(bashDrv); shellDrv = bashDrv; @@ -382,17 +410,24 @@ static void main_nix_build(int argc, char * * argv) } // Build or fetch all dependencies of the derivation. - for (const auto & input : drv.inputDrvs) + for (const auto & [inputDrv0, inputOutputs] : drv.inputDrvs) { + // To get around lambda capturing restrictions in the + // standard. + const auto & inputDrv = inputDrv0; if (std::all_of(envExclude.cbegin(), envExclude.cend(), [&](const std::string & exclude) { - return !std::regex_search(store->printStorePath(input.first), std::regex(exclude)); + return !std::regex_search(store->printStorePath(inputDrv), std::regex(exclude)); })) { - pathsToBuild.push_back({input.first, input.second}); - pathsToCopy.insert(input.first); + pathsToBuild.push_back(DerivedPath::Built { + .drvPath = inputDrv, + .outputs = inputOutputs + }); + pathsToCopy.insert(inputDrv); } + } for (const auto & src : drv.inputSrcs) { - pathsToBuild.push_back({src}); + pathsToBuild.push_back(DerivedPath::Opaque{src}); pathsToCopy.insert(src); } @@ -431,7 +466,7 @@ static void main_nix_build(int argc, char * * argv) env["NIX_STORE"] = store->storeDir; env["NIX_BUILD_CORES"] = std::to_string(settings.buildCores); - auto passAsFile = tokenizeString<StringSet>(get(drv.env, "passAsFile").value_or("")); + auto passAsFile = tokenizeString<StringSet>(getOr(drv.env, "passAsFile", "")); bool keepTmp = false; int fileNr = 0; @@ -458,10 +493,7 @@ static void main_nix_build(int argc, char * * argv) } } - ParsedDerivation parsedDrv( - StorePath(store->parseStorePath(drvInfo.queryDrvPath())), - drv - ); + ParsedDerivation parsedDrv(drvInfo.requireDrvPath(), drv); if (auto structAttrs = parsedDrv.prepareStructuredAttrs(*store, inputs)) { auto json = structAttrs.value(); @@ -546,20 +578,20 @@ static void main_nix_build(int argc, char * * argv) else { - std::vector<StorePathWithOutputs> pathsToBuild; + std::vector<DerivedPath> pathsToBuild; std::vector<std::pair<StorePath, std::string>> pathsToBuildOrdered; RealisedPath::Set drvsToCopy; std::map<StorePath, std::pair<size_t, StringSet>> drvMap; for (auto & drvInfo : drvs) { - auto drvPath = store->parseStorePath(drvInfo.queryDrvPath()); + auto drvPath = drvInfo.requireDrvPath(); auto outputName = drvInfo.queryOutputName(); if (outputName == "") throw Error("derivation '%s' lacks an 'outputName' attribute", store->printStorePath(drvPath)); - pathsToBuild.push_back({drvPath, {outputName}}); + pathsToBuild.push_back(DerivedPath::Built{drvPath, {outputName}}); pathsToBuildOrdered.push_back({drvPath, {outputName}}); drvsToCopy.insert(drvPath); diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc index f46b90b90..cf52b03b4 100755 --- a/src/nix-channel/nix-channel.cc +++ b/src/nix-channel/nix-channel.cc @@ -96,7 +96,7 @@ static void update(const StringSet & channelNames) std::smatch match; auto urlBase = std::string(baseNameOf(url)); if (std::regex_search(urlBase, match, std::regex("(-\\d.*)$"))) - cname = cname + (std::string) match[1]; + cname = cname + match.str(1); std::string extraAttrs; diff --git 
a/src/nix-collect-garbage/nix-collect-garbage.cc b/src/nix-collect-garbage/nix-collect-garbage.cc index 48c030b18..e413faffe 100644 --- a/src/nix-collect-garbage/nix-collect-garbage.cc +++ b/src/nix-collect-garbage/nix-collect-garbage.cc @@ -1,4 +1,6 @@ #include "store-api.hh" +#include "store-cast.hh" +#include "gc-store.hh" #include "profiles.hh" #include "shared.hh" #include "globals.hh" @@ -35,6 +37,7 @@ void removeOldGenerations(std::string dir) link = readLink(path); } catch (SysError & e) { if (e.errNo == ENOENT) continue; + throw; } if (link.find("link") != std::string::npos) { printInfo(format("removing old generations of profile %1%") % path); @@ -80,10 +83,11 @@ static int main_nix_collect_garbage(int argc, char * * argv) // Run the actual garbage collector. if (!dryRun) { auto store = openStore(); + auto & gcStore = require<GcStore>(*store); options.action = GCOptions::gcDeleteDead; GCResults results; PrintFreed freed(true, results); - store->collectGarbage(options, results); + gcStore.collectGarbage(options, results); } return 0; diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index 12e08bbe1..fdd66220a 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -128,7 +128,12 @@ static void getAllExprs(EvalState & state, if (hasSuffix(attrName, ".nix")) attrName = std::string(attrName, 0, attrName.size() - 4); if (!seen.insert(attrName).second) { - printError("warning: name collision in input Nix expressions, skipping '%1%'", path2); + std::string suggestionMessage = ""; + if (path2.find("channels") != std::string::npos && path.find("channels") != std::string::npos) { + suggestionMessage = fmt("\nsuggestion: remove '%s' from either the root channels or the user channels", attrName); + } + printError("warning: name collision in input Nix expressions, skipping '%1%'" + "%2%", path2, suggestionMessage); continue; } /* Load the expression on demand. */ @@ -211,7 +216,7 @@ static long comparePriorities(EvalState & state, DrvInfo & drv1, DrvInfo & drv2) // at a time. 
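// The removeOldGenerations hunk above adds a missing `throw;`: only ENOENT
// (a dangling profile link) should be skipped, and every other readLink
// failure must propagate. A minimal standalone sketch of that pattern,
// using std::system_error rather than Nix's SysError:

#include <cerrno>
#include <iostream>
#include <string>
#include <system_error>
#include <unistd.h>
#include <vector>

static std::string readLinkOrThrow(const std::string & path)
{
    char buf[4096];
    ssize_t n = readlink(path.c_str(), buf, sizeof(buf));
    if (n == -1)
        throw std::system_error(errno, std::generic_category(), "reading link " + path);
    return std::string(buf, n);
}

int main()
{
    // Illustrative paths only.
    std::vector<std::string> links{"/tmp/profile-1-link", "/tmp/profile-2-link"};
    for (auto & path : links) {
        std::string target;
        try {
            target = readLinkOrThrow(path);
        } catch (std::system_error & e) {
            if (e.code() == std::errc::no_such_file_or_directory) continue; // skip dangling entries
            throw;  // anything else (EACCES, EINVAL, ...) is a real error: re-raise it
        }
        std::cout << path << " -> " << target << '\n';
    }
}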
static bool isPrebuilt(EvalState & state, DrvInfo & elem) { - auto path = state.store->parseStorePath(elem.queryOutPath()); + auto path = elem.queryOutPath(); if (state.store->isValidPath(path)) return true; return state.store->querySubstitutablePaths({path}).count(path); } @@ -429,14 +434,15 @@ static void queryInstSources(EvalState & state, elem.setName(name); if (path.isDerivation()) { - elem.setDrvPath(state.store->printStorePath(path)); + elem.setDrvPath(path); auto outputs = state.store->queryDerivationOutputMap(path); - elem.setOutPath(state.store->printStorePath(outputs.at("out"))); + elem.setOutPath(outputs.at("out")); if (name.size() >= drvExtension.size() && std::string(name, name.size() - drvExtension.size()) == drvExtension) name = name.substr(0, name.size() - drvExtension.size()); } - else elem.setOutPath(state.store->printStorePath(path)); + else + elem.setOutPath(path); elems.push_back(elem); } @@ -470,13 +476,11 @@ static void queryInstSources(EvalState & state, static void printMissing(EvalState & state, DrvInfos & elems) { std::vector<DerivedPath> targets; - for (auto & i : elems) { - Path drvPath = i.queryDrvPath(); - if (drvPath != "") - targets.push_back(DerivedPath::Built{state.store->parseStorePath(drvPath)}); + for (auto & i : elems) + if (auto drvPath = i.queryDrvPath()) + targets.push_back(DerivedPath::Built{*drvPath}); else - targets.push_back(DerivedPath::Opaque{state.store->parseStorePath(i.queryOutPath())}); - } + targets.push_back(DerivedPath::Opaque{i.queryOutPath()}); printMissing(state.store, targets); } @@ -744,14 +748,11 @@ static void opSet(Globals & globals, Strings opFlags, Strings opArgs) if (globals.forceName != "") drv.setName(globals.forceName); + auto drvPath = drv.queryDrvPath(); std::vector<DerivedPath> paths { - (drv.queryDrvPath() != "") - ? (DerivedPath) (DerivedPath::Built { - globals.state->store->parseStorePath(drv.queryDrvPath()) - }) - : (DerivedPath) (DerivedPath::Opaque { - globals.state->store->parseStorePath(drv.queryOutPath()) - }), + drvPath + ? 
(DerivedPath) (DerivedPath::Built { *drvPath }) + : (DerivedPath) (DerivedPath::Opaque { drv.queryOutPath() }), }; printMissing(globals.state->store, paths); if (globals.dryRun) return; @@ -759,8 +760,9 @@ static void opSet(Globals & globals, Strings opFlags, Strings opArgs) debug(format("switching to new user environment")); Path generation = createGeneration( - ref<LocalFSStore>(store2), globals.profile, - store2->parseStorePath(drv.queryOutPath())); + ref<LocalFSStore>(store2), + globals.profile, + drv.queryOutPath()); switchLink(globals.profile, generation); } @@ -780,7 +782,7 @@ static void uninstallDerivations(Globals & globals, Strings & selectors, split = std::partition( workingElems.begin(), workingElems.end(), [&selectorStorePath, globals](auto &elem) { - return selectorStorePath != globals.state->store->parseStorePath(elem.queryOutPath()); + return selectorStorePath != elem.queryOutPath(); } ); } else { @@ -921,12 +923,16 @@ static void queryJSON(Globals & globals, std::vector<DrvInfo> & elems, bool prin pkgObj.attr("pname", drvName.name); pkgObj.attr("version", drvName.version); pkgObj.attr("system", i.querySystem()); + pkgObj.attr("outputName", i.queryOutputName()); - if (printOutPath) { - DrvInfo::Outputs outputs = i.queryOutputs(); + { + DrvInfo::Outputs outputs = i.queryOutputs(printOutPath); JSONObject outputObj = pkgObj.object("outputs"); for (auto & j : outputs) { - outputObj.attr(j.first, j.second); + if (j.second) + outputObj.attr(j.first, globals.state->store->printStorePath(*j.second)); + else + outputObj.attr(j.first, nullptr); } } @@ -934,12 +940,12 @@ static void queryJSON(Globals & globals, std::vector<DrvInfo> & elems, bool prin JSONObject metaObj = pkgObj.object("meta"); StringSet metaNames = i.queryMetaNames(); for (auto & j : metaNames) { - auto placeholder = metaObj.placeholder(j); Value * v = i.queryMeta(j); if (!v) { printError("derivation '%s' has invalid meta attribute '%s'", i.queryName(), j); - placeholder.write(nullptr); + metaObj.attr(j, nullptr); } else { + auto placeholder = metaObj.placeholder(j); PathSet context; printValueAsJSON(*globals.state, true, *v, noPos, placeholder, context); } @@ -957,6 +963,8 @@ static void queryJSON(Globals & globals, std::vector<DrvInfo> & elems, bool prin static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) { + auto & store { *globals.state->store }; + Strings remaining; std::string attrPath; @@ -1027,12 +1035,11 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) /* We only need to know the installed paths when we are querying the status of the derivation. */ - PathSet installed; /* installed paths */ + StorePathSet installed; /* installed paths */ - if (printStatus) { + if (printStatus) for (auto & i : installedElems) installed.insert(i.queryOutPath()); - } /* Query which paths have substitutes. 
*/ @@ -1042,19 +1049,20 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) StorePathSet paths; for (auto & i : elems) try { - paths.insert(globals.state->store->parseStorePath(i.queryOutPath())); + paths.insert(i.queryOutPath()); } catch (AssertionError & e) { printMsg(lvlTalkative, "skipping derivation named '%s' which gives an assertion failure", i.queryName()); i.setFailed(); } - validPaths = globals.state->store->queryValidPaths(paths); - substitutablePaths = globals.state->store->querySubstitutablePaths(paths); + validPaths = store.queryValidPaths(paths); + substitutablePaths = store.querySubstitutablePaths(paths); } /* Print the desired columns, or XML output. */ if (jsonOutput) { queryJSON(globals, elems, printOutPath, printMeta); + cout << '\n'; return; } @@ -1073,8 +1081,8 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) //Activity act(*logger, lvlDebug, format("outputting query result '%1%'") % i.attrPath); if (globals.prebuiltOnly && - !validPaths.count(globals.state->store->parseStorePath(i.queryOutPath())) && - !substitutablePaths.count(globals.state->store->parseStorePath(i.queryOutPath()))) + !validPaths.count(i.queryOutPath()) && + !substitutablePaths.count(i.queryOutPath())) continue; /* For table output. */ @@ -1084,10 +1092,10 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) XMLAttrs attrs; if (printStatus) { - Path outPath = i.queryOutPath(); - bool hasSubs = substitutablePaths.count(globals.state->store->parseStorePath(outPath)); - bool isInstalled = installed.find(outPath) != installed.end(); - bool isValid = validPaths.count(globals.state->store->parseStorePath(outPath)); + auto outPath = i.queryOutPath(); + bool hasSubs = substitutablePaths.count(outPath); + bool isInstalled = installed.count(outPath); + bool isValid = validPaths.count(outPath); if (xmlOutput) { attrs["installed"] = isInstalled ? "1" : "0"; attrs["valid"] = isValid ? "1" : "0"; @@ -1152,18 +1160,21 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) if (printDrvPath) { auto drvPath = i.queryDrvPath(); if (xmlOutput) { - if (drvPath != "") attrs["drvPath"] = drvPath; + if (drvPath) attrs["drvPath"] = store.printStorePath(*drvPath); } else - columns.push_back(drvPath == "" ? "-" : drvPath); + columns.push_back(drvPath ? 
store.printStorePath(*drvPath) : "-"); } + if (xmlOutput) + attrs["outputName"] = i.queryOutputName(); + if (printOutPath && !xmlOutput) { DrvInfo::Outputs outputs = i.queryOutputs(); std::string s; for (auto & j : outputs) { if (!s.empty()) s += ';'; if (j.first != "out") { s += j.first; s += "="; } - s += j.second; + s += store.printStorePath(*j.second); } columns.push_back(s); } @@ -1177,71 +1188,67 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) } if (xmlOutput) { - if (printOutPath || printMeta) { - XMLOpenElement item(xml, "item", attrs); - if (printOutPath) { - DrvInfo::Outputs outputs = i.queryOutputs(); - for (auto & j : outputs) { - XMLAttrs attrs2; - attrs2["name"] = j.first; - attrs2["path"] = j.second; - xml.writeEmptyElement("output", attrs2); - } - } - if (printMeta) { - StringSet metaNames = i.queryMetaNames(); - for (auto & j : metaNames) { - XMLAttrs attrs2; - attrs2["name"] = j; - Value * v = i.queryMeta(j); - if (!v) - printError( - "derivation '%s' has invalid meta attribute '%s'", - i.queryName(), j); - else { - if (v->type() == nString) { - attrs2["type"] = "string"; - attrs2["value"] = v->string.s; - xml.writeEmptyElement("meta", attrs2); - } else if (v->type() == nInt) { - attrs2["type"] = "int"; - attrs2["value"] = (format("%1%") % v->integer).str(); - xml.writeEmptyElement("meta", attrs2); - } else if (v->type() == nFloat) { - attrs2["type"] = "float"; - attrs2["value"] = (format("%1%") % v->fpoint).str(); - xml.writeEmptyElement("meta", attrs2); - } else if (v->type() == nBool) { - attrs2["type"] = "bool"; - attrs2["value"] = v->boolean ? "true" : "false"; - xml.writeEmptyElement("meta", attrs2); - } else if (v->type() == nList) { - attrs2["type"] = "strings"; - XMLOpenElement m(xml, "meta", attrs2); - for (auto elem : v->listItems()) { - if (elem->type() != nString) continue; - XMLAttrs attrs3; - attrs3["value"] = elem->string.s; - xml.writeEmptyElement("string", attrs3); - } - } else if (v->type() == nAttrs) { - attrs2["type"] = "strings"; - XMLOpenElement m(xml, "meta", attrs2); - Bindings & attrs = *v->attrs; - for (auto &i : attrs) { - Attr & a(*attrs.find(i.name)); - if(a.value->type() != nString) continue; - XMLAttrs attrs3; - attrs3["type"] = i.name; - attrs3["value"] = a.value->string.s; - xml.writeEmptyElement("string", attrs3); + XMLOpenElement item(xml, "item", attrs); + DrvInfo::Outputs outputs = i.queryOutputs(printOutPath); + for (auto & j : outputs) { + XMLAttrs attrs2; + attrs2["name"] = j.first; + if (j.second) + attrs2["path"] = store.printStorePath(*j.second); + xml.writeEmptyElement("output", attrs2); + } + if (printMeta) { + StringSet metaNames = i.queryMetaNames(); + for (auto & j : metaNames) { + XMLAttrs attrs2; + attrs2["name"] = j; + Value * v = i.queryMeta(j); + if (!v) + printError( + "derivation '%s' has invalid meta attribute '%s'", + i.queryName(), j); + else { + if (v->type() == nString) { + attrs2["type"] = "string"; + attrs2["value"] = v->string.s; + xml.writeEmptyElement("meta", attrs2); + } else if (v->type() == nInt) { + attrs2["type"] = "int"; + attrs2["value"] = (format("%1%") % v->integer).str(); + xml.writeEmptyElement("meta", attrs2); + } else if (v->type() == nFloat) { + attrs2["type"] = "float"; + attrs2["value"] = (format("%1%") % v->fpoint).str(); + xml.writeEmptyElement("meta", attrs2); + } else if (v->type() == nBool) { + attrs2["type"] = "bool"; + attrs2["value"] = v->boolean ? 
"true" : "false"; + xml.writeEmptyElement("meta", attrs2); + } else if (v->type() == nList) { + attrs2["type"] = "strings"; + XMLOpenElement m(xml, "meta", attrs2); + for (auto elem : v->listItems()) { + if (elem->type() != nString) continue; + XMLAttrs attrs3; + attrs3["value"] = elem->string.s; + xml.writeEmptyElement("string", attrs3); } - } + } else if (v->type() == nAttrs) { + attrs2["type"] = "strings"; + XMLOpenElement m(xml, "meta", attrs2); + Bindings & attrs = *v->attrs; + for (auto &i : attrs) { + Attr & a(*attrs.find(i.name)); + if(a.value->type() != nString) continue; + XMLAttrs attrs3; + attrs3["type"] = globals.state->symbols[i.name]; + attrs3["value"] = a.value->string.s; + xml.writeEmptyElement("string", attrs3); + } } } } - } else - xml.writeEmptyElement("item", attrs); + } } else table.push_back(columns); @@ -1478,12 +1485,10 @@ static int main_nix_env(int argc, char * * argv) if (globals.profile == "") globals.profile = getDefaultProfile(); - op(globals, opFlags, opArgs); + op(globals, std::move(opFlags), std::move(opArgs)); globals.state->printStats(); - logger->stop(); - return 0; } } diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 37e4086cb..4b1202be3 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -38,8 +38,8 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, exist already. */ std::vector<StorePathWithOutputs> drvsToBuild; for (auto & i : elems) - if (i.queryDrvPath() != "") - drvsToBuild.push_back({state.store->parseStorePath(i.queryDrvPath())}); + if (auto drvPath = i.queryDrvPath()) + drvsToBuild.push_back({*drvPath}); debug(format("building user environment dependencies")); state.store->buildPaths( @@ -55,8 +55,8 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, /* Create a pseudo-derivation containing the name, system, output paths, and optionally the derivation path, as well as the meta attributes. */ - Path drvPath = keepDerivations ? i.queryDrvPath() : ""; - DrvInfo::Outputs outputs = i.queryOutputs(true); + std::optional<StorePath> drvPath = keepDerivations ? i.queryDrvPath() : std::nullopt; + DrvInfo::Outputs outputs = i.queryOutputs(true, true); StringSet metaNames = i.queryMetaNames(); auto attrs = state.buildBindings(7 + outputs.size()); @@ -66,9 +66,9 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, auto system = i.querySystem(); if (!system.empty()) attrs.alloc(state.sSystem).mkString(system); - attrs.alloc(state.sOutPath).mkString(i.queryOutPath()); - if (drvPath != "") - attrs.alloc(state.sDrvPath).mkString(i.queryDrvPath()); + attrs.alloc(state.sOutPath).mkString(state.store->printStorePath(i.queryOutPath())); + if (drvPath) + attrs.alloc(state.sDrvPath).mkString(state.store->printStorePath(*drvPath)); // Copy each output meant for installation. auto & vOutputs = attrs.alloc(state.sOutputs); @@ -76,15 +76,15 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, for (const auto & [m, j] : enumerate(outputs)) { (vOutputs.listElems()[m] = state.allocValue())->mkString(j.first); auto outputAttrs = state.buildBindings(2); - outputAttrs.alloc(state.sOutPath).mkString(j.second); + outputAttrs.alloc(state.sOutPath).mkString(state.store->printStorePath(*j.second)); attrs.alloc(j.first).mkAttrs(outputAttrs); /* This is only necessary when installing store paths, e.g., `nix-env -i /nix/store/abcd...-foo'. 
*/ - state.store->addTempRoot(state.store->parseStorePath(j.second)); - state.store->ensurePath(state.store->parseStorePath(j.second)); + state.store->addTempRoot(*j.second); + state.store->ensurePath(*j.second); - references.insert(state.store->parseStorePath(j.second)); + references.insert(*j.second); } // Copy the meta attributes. @@ -99,14 +99,16 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, (manifest.listElems()[n++] = state.allocValue())->mkAttrs(attrs); - if (drvPath != "") references.insert(state.store->parseStorePath(drvPath)); + if (drvPath) references.insert(*drvPath); } /* Also write a copy of the list of user environment elements to the store; we need it for future modifications of the environment. */ + std::ostringstream str; + manifest.print(state.symbols, str, true); auto manifestFile = state.store->addTextToStore("env-manifest.nix", - fmt("%s", manifest), references); + str.str(), references); /* Get the environment builder expression. */ Value envBuilder; @@ -132,9 +134,9 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, state.forceValue(topLevel, [&]() { return topLevel.determinePos(noPos); }); PathSet context; Attr & aDrvPath(*topLevel.attrs->find(state.sDrvPath)); - auto topLevelDrv = state.store->parseStorePath(state.coerceToPath(*aDrvPath.pos, *aDrvPath.value, context)); + auto topLevelDrv = state.coerceToStorePath(aDrvPath.pos, *aDrvPath.value, context); Attr & aOutPath(*topLevel.attrs->find(state.sOutPath)); - Path topLevelOut = state.coerceToPath(*aOutPath.pos, *aOutPath.value, context); + auto topLevelOut = state.coerceToStorePath(aOutPath.pos, *aOutPath.value, context); /* Realise the resulting store expression. */ debug("building user environment"); @@ -158,8 +160,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, } debug(format("switching to new user environment")); - Path generation = createGeneration(ref<LocalFSStore>(store2), profile, - store2->parseStorePath(topLevelOut)); + Path generation = createGeneration(ref<LocalFSStore>(store2), profile, topLevelOut); switchLink(profile, generation); } diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index 12fc8baf9..6b5ba595d 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -31,7 +31,8 @@ void processExpr(EvalState & state, const Strings & attrPaths, bool evalOnly, OutputKind output, bool location, Expr * e) { if (parseOnly) { - std::cout << format("%1%\n") % *e; + e->show(state.symbols, std::cout); + std::cout << "\n"; return; } @@ -51,22 +52,25 @@ void processExpr(EvalState & state, const Strings & attrPaths, state.autoCallFunction(autoArgs, v, vRes); if (output == okXML) printValueAsXML(state, strict, location, vRes, std::cout, context, noPos); - else if (output == okJSON) + else if (output == okJSON) { printValueAsJSON(state, strict, vRes, v.determinePos(noPos), std::cout, context); - else { + std::cout << std::endl; + } else { if (strict) state.forceValueDeep(vRes); - std::cout << vRes << std::endl; + vRes.print(state.symbols, std::cout); + std::cout << std::endl; } } else { DrvInfos drvs; getDerivations(state, v, "", autoArgs, drvs, false); for (auto & i : drvs) { - Path drvPath = i.queryDrvPath(); + auto drvPath = i.requireDrvPath(); + auto drvPathS = state.store->printStorePath(drvPath); /* What output do we want? 
*/ std::string outputName = i.queryOutputName(); if (outputName == "") - throw Error("derivation '%1%' lacks an 'outputName' attribute ", drvPath); + throw Error("derivation '%1%' lacks an 'outputName' attribute", drvPathS); if (gcRoot == "") printGCWarning(); @@ -75,9 +79,9 @@ void processExpr(EvalState & state, const Strings & attrPaths, if (++rootNr > 1) rootName += "-" + std::to_string(rootNr); auto store2 = state.store.dynamic_pointer_cast<LocalFSStore>(); if (store2) - drvPath = store2->addPermRoot(store2->parseStorePath(drvPath), rootName); + drvPathS = store2->addPermRoot(drvPath, rootName); } - std::cout << fmt("%s%s\n", drvPath, (outputName != "out" ? "!" + outputName : "")); + std::cout << fmt("%s%s\n", drvPathS, (outputName != "out" ? "!" + outputName : "")); } } } diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 447774161..eb65e7dde 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -2,6 +2,10 @@ #include "derivations.hh" #include "dotgraph.hh" #include "globals.hh" +#include "build-result.hh" +#include "store-cast.hh" +#include "gc-store.hh" +#include "log-store.hh" #include "local-store.hh" #include "monitor-fd.hh" #include "serve-protocol.hh" @@ -427,11 +431,12 @@ static void opQuery(Strings opFlags, Strings opArgs) store->computeFSClosure( args, referrers, true, settings.gcKeepOutputs, settings.gcKeepDerivations); - Roots roots = store->findRoots(false); + auto & gcStore = require<GcStore>(*store); + Roots roots = gcStore.findRoots(false); for (auto & [target, links] : roots) if (referrers.find(target) != referrers.end()) for (auto & link : links) - cout << fmt("%1% -> %2%\n", link, store->printStorePath(target)); + cout << fmt("%1% -> %2%\n", link, gcStore.printStorePath(target)); break; } @@ -471,13 +476,15 @@ static void opReadLog(Strings opFlags, Strings opArgs) { if (!opFlags.empty()) throw UsageError("unknown flag"); + auto & logStore = require<LogStore>(*store); + RunPager pager; for (auto & i : opArgs) { - auto path = store->followLinksToStorePath(i); - auto log = store->getBuildLog(path); + auto path = logStore.followLinksToStorePath(i); + auto log = logStore.getBuildLog(path); if (!log) - throw Error("build log of derivation '%s' is not available", store->printStorePath(path)); + throw Error("build log of derivation '%s' is not available", logStore.printStorePath(path)); std::cout << *log; } } @@ -587,20 +594,22 @@ static void opGC(Strings opFlags, Strings opArgs) if (!opArgs.empty()) throw UsageError("no arguments expected"); + auto & gcStore = require<GcStore>(*store); + if (printRoots) { - Roots roots = store->findRoots(false); + Roots roots = gcStore.findRoots(false); std::set<std::pair<Path, StorePath>> roots2; // Transpose and sort the roots. 
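// The loop below turns the `target -> {links}` mapping returned by findRoots
// inside out, so the listing comes out sorted by the link path. A
// self-contained sketch of the same transposition, with plain strings
// standing in for StorePath and illustrative paths only:

#include <iostream>
#include <map>
#include <set>
#include <string>
#include <utility>

int main()
{
    // target store path -> set of symlinks (GC roots) pointing at it
    std::map<std::string, std::set<std::string>> roots{
        {"/nix/store/aaa-hello", {"/home/alice/result", "/nix/var/nix/profiles/default"}},
        {"/nix/store/bbb-firefox", {"/home/alice/result-2"}},
    };

    // Transpose into (link, target) pairs; std::set keeps them sorted by link.
    std::set<std::pair<std::string, std::string>> byLink;
    for (auto & [target, links] : roots)
        for (auto & link : links)
            byLink.emplace(link, target);

    for (auto & [link, target] : byLink)
        std::cout << link << " -> " << target << '\n';
}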
for (auto & [target, links] : roots) for (auto & link : links) roots2.emplace(link, target); for (auto & [link, target] : roots2) - std::cout << link << " -> " << store->printStorePath(target) << "\n"; + std::cout << link << " -> " << gcStore.printStorePath(target) << "\n"; } else { PrintFreed freed(options.action == GCOptions::gcDeleteDead, results); - store->collectGarbage(options, results); + gcStore.collectGarbage(options, results); if (options.action != GCOptions::gcDeleteDead) for (auto & i : results.paths) @@ -624,9 +633,11 @@ static void opDelete(Strings opFlags, Strings opArgs) for (auto & i : opArgs) options.pathsToDelete.insert(store->followLinksToStorePath(i)); + auto & gcStore = require<GcStore>(*store); + GCResults results; PrintFreed freed(true, results); - store->collectGarbage(options, results); + gcStore.collectGarbage(options, results); } @@ -911,7 +922,7 @@ static void opServe(Strings opFlags, Strings opArgs) if (GET_PROTOCOL_MINOR(clientVersion) >= 3) out << status.timesBuilt << status.isNonDeterministic << status.startTime << status.stopTime; - if (GET_PROTOCOL_MINOR(clientVersion >= 6)) { + if (GET_PROTOCOL_MINOR(clientVersion) >= 6) { worker_proto::write(*store, out, status.builtOutputs); } @@ -1082,9 +1093,7 @@ static int main_nix_store(int argc, char * * argv) if (op != opDump && op != opRestore) /* !!! hack */ store = openStore(); - op(opFlags, opArgs); - - logger->stop(); + op(std::move(opFlags), std::move(opArgs)); return 0; } diff --git a/src/nix/app.cc b/src/nix/app.cc index e104cc9c1..48de8fb82 100644 --- a/src/nix/app.cc +++ b/src/nix/app.cc @@ -4,6 +4,7 @@ #include "eval-cache.hh" #include "names.hh" #include "command.hh" +#include "derivations.hh" namespace nix { @@ -34,7 +35,7 @@ struct InstallableDerivedPath : Installable /** * Return the rewrites that are needed to resolve a string whose context is - * included in `dependencies` + * included in `dependencies`. */ StringPairs resolveRewrites(Store & store, const BuiltPaths dependencies) { @@ -50,7 +51,7 @@ StringPairs resolveRewrites(Store & store, const BuiltPaths dependencies) } /** - * Resolve the given string assuming the given context + * Resolve the given string assuming the given context. */ std::string resolveString(Store & store, const std::string & toResolve, const BuiltPaths dependencies) { @@ -60,17 +61,23 @@ std::string resolveString(Store & store, const std::string & toResolve, const Bu UnresolvedApp Installable::toApp(EvalState & state) { - auto [cursor, attrPath] = getCursor(state); + auto cursor = getCursor(state); + auto attrPath = cursor->getAttrPath(); auto type = cursor->getAttr("type")->getString(); + std::string expected = !attrPath.empty() && + (state.symbols[attrPath[0]] == "apps" || state.symbols[attrPath[0]] == "defaultApp") + ? 
"app" : "derivation"; + if (type != expected) + throw Error("attribute '%s' should have type '%s'", cursor->getAttrPathStr(), expected); + if (type == "app") { auto [program, context] = cursor->getAttr("program")->getStringWithContext(); - std::vector<StorePathWithOutputs> context2; for (auto & [path, name] : context) - context2.push_back({state.store->parseStorePath(path), {name}}); + context2.push_back({path, {name}}); return UnresolvedApp{App { .context = std::move(context2), @@ -84,7 +91,7 @@ UnresolvedApp Installable::toApp(EvalState & state) auto outputName = cursor->getAttr(state.sOutputName)->getString(); auto name = cursor->getAttr(state.sName)->getString(); auto aPname = cursor->maybeGetAttr("pname"); - auto aMeta = cursor->maybeGetAttr("meta"); + auto aMeta = cursor->maybeGetAttr(state.sMeta); auto aMainProgram = aMeta ? aMeta->maybeGetAttr("mainProgram") : nullptr; auto mainProgram = aMainProgram @@ -100,7 +107,7 @@ UnresolvedApp Installable::toApp(EvalState & state) } else - throw Error("attribute '%s' has unsupported type '%s'", attrPath, type); + throw Error("attribute '%s' has unsupported type '%s'", cursor->getAttrPathStr(), type); } // FIXME: move to libcmd @@ -114,7 +121,7 @@ App UnresolvedApp::resolve(ref<Store> evalStore, ref<Store> store) installableContext.push_back( std::make_shared<InstallableDerivedPath>(store, ctxElt.toDerivedPath())); - auto builtContext = build(evalStore, store, Realise::Outputs, installableContext); + auto builtContext = Installable::build(evalStore, store, Realise::Outputs, installableContext); res.program = resolveString(*store, unresolved.program, builtContext); if (!store->isInStore(res.program)) throw Error("app program '%s' is not in the Nix store", res.program); diff --git a/src/nix/build.cc b/src/nix/build.cc index 6e31757a2..9c648d28e 100644 --- a/src/nix/build.cc +++ b/src/nix/build.cc @@ -4,6 +4,7 @@ #include "shared.hh" #include "store-api.hh" #include "local-fs-store.hh" +#include "progress-bar.hh" #include <nlohmann/json.hpp> @@ -12,6 +13,7 @@ using namespace nix; struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile { Path outLink = "result"; + bool printOutputPaths = false; BuildMode buildMode = bmNormal; CmdBuild() @@ -32,6 +34,12 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile }); addFlag({ + .longName = "print-out-paths", + .description = "Print the resulting output paths", + .handler = {&printOutputPaths, true}, + }); + + addFlag({ .longName = "rebuild", .description = "Rebuild an already built package and compare the result to the existing store paths.", .handler = {&buildMode, bmCheck}, @@ -52,15 +60,26 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile void run(ref<Store> store) override { - auto buildables = build( + if (dryRun) { + std::vector<DerivedPath> pathsToBuild; + + for (auto & i : installables) { + auto b = i->toDerivedPaths(); + pathsToBuild.insert(pathsToBuild.end(), b.begin(), b.end()); + } + printMissing(store, pathsToBuild, lvlError); + if (json) + logger->cout("%s", derivedPathsToJSON(pathsToBuild, store).dump()); + return; + } + + auto buildables = Installable::build( getEvalStore(), store, - dryRun ? 
Realise::Derivation : Realise::Outputs, + Realise::Outputs, installables, buildMode); if (json) logger->cout("%s", derivedPathsWithHintsToJSON(buildables, store).dump()); - if (dryRun) return; - if (outLink != "") if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>()) for (const auto & [_i, buildable] : enumerate(buildables)) { @@ -82,6 +101,22 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile }, buildable.raw()); } + if (printOutputPaths) { + stopProgressBar(); + for (auto & buildable : buildables) { + std::visit(overloaded { + [&](const BuiltPath::Opaque & bo) { + std::cout << store->printStorePath(bo.path) << std::endl; + }, + [&](const BuiltPath::Built & bfd) { + for (auto & output : bfd.outputs) { + std::cout << store->printStorePath(output.second) << std::endl; + } + }, + }, buildable.raw()); + } + } + updateProfile(buildables); } }; diff --git a/src/nix/build.md b/src/nix/build.md index 20138b7e0..6a79f308c 100644 --- a/src/nix/build.md +++ b/src/nix/build.md @@ -25,6 +25,13 @@ R""( lrwxrwxrwx 1 … result-1 -> /nix/store/rkfrm0z6x6jmi7d3gsmma4j53h15mg33-cowsay-3.03+dfsg2 ``` +* Build GNU Hello and print the resulting store path. + + ```console + # nix build nixpkgs#hello --print-out-paths + /nix/store/v5sv61sszx301i0x6xysaqzla09nksnd-hello-2.10 + ``` + * Build a specific output: ```console diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc index 6b891a6ee..2e48e4c74 100644 --- a/src/nix/bundle.cc +++ b/src/nix/bundle.cc @@ -9,7 +9,7 @@ using namespace nix; struct CmdBundle : InstallableCommand { - std::string bundler = "github:matthewbauer/nix-bundle"; + std::string bundler = "github:NixOS/bundlers"; std::optional<Path> outLink; CmdBundle() @@ -75,10 +75,10 @@ struct CmdBundle : InstallableCommand auto val = installable->toValue(*evalState).first; - auto [bundlerFlakeRef, bundlerName] = parseFlakeRefWithFragment(bundler, absPath(".")); + auto [bundlerFlakeRef, bundlerName, outputsSpec] = parseFlakeRefWithFragmentAndOutputsSpec(bundler, absPath(".")); const flake::LockFlags lockFlags{ .writeLockFile = false }; InstallableFlake bundler{this, - evalState, std::move(bundlerFlakeRef), bundlerName, + evalState, std::move(bundlerFlakeRef), bundlerName, outputsSpec, {"bundlers." + settings.thisSystem.get() + ".default", "defaultBundler." + settings.thisSystem.get() }, @@ -97,21 +97,23 @@ struct CmdBundle : InstallableCommand throw Error("the bundler '%s' does not produce a derivation", bundler.what()); PathSet context2; - StorePath drvPath = store->parseStorePath(evalState->coerceToPath(*attr1->pos, *attr1->value, context2)); + auto drvPath = evalState->coerceToStorePath(attr1->pos, *attr1->value, context2); auto attr2 = vRes->attrs->get(evalState->sOutPath); if (!attr2) throw Error("the bundler '%s' does not produce a derivation", bundler.what()); - StorePath outPath = store->parseStorePath(evalState->coerceToPath(*attr2->pos, *attr2->value, context2)); + auto outPath = evalState->coerceToStorePath(attr2->pos, *attr2->value, context2); store->buildPaths({ DerivedPath::Built { drvPath } }); auto outPathS = store->printStorePath(outPath); if (!outLink) { - auto &attr = vRes->attrs->need(evalState->sName); - outLink = evalState->forceStringNoCtx(*attr.value,*attr.pos); + auto * attr = vRes->attrs->get(evalState->sName); + if (!attr) + throw Error("attribute 'name' missing"); + outLink = evalState->forceStringNoCtx(*attr->value, attr->pos); } // TODO: will crash if not a localFSStore? 
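// The --print-out-paths handler above dispatches on the two alternatives of a
// built path with std::visit and the `overloaded` helper declared in util.hh.
// A sketch of the same idiom over simplified stand-in types (Opaque/Built
// here are not the real BuiltPath classes):

#include <iostream>
#include <map>
#include <string>
#include <variant>
#include <vector>

// Simplified stand-ins for BuiltPath::Opaque / BuiltPath::Built.
struct Opaque { std::string path; };
struct Built  { std::string drvPath; std::map<std::string, std::string> outputs; };
using BuiltPathSketch = std::variant<Opaque, Built>;

// The `overloaded` trick: merge several lambdas' call operators into one visitor.
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

int main()
{
    std::vector<BuiltPathSketch> buildables{
        Opaque{"/nix/store/aaa-source"},
        Built{"/nix/store/bbb-hello.drv", {{"out", "/nix/store/ccc-hello-2.10"}}},
    };

    for (auto & b : buildables)
        std::visit(overloaded {
            [](const Opaque & bo) { std::cout << bo.path << '\n'; },
            [](const Built & bfd) {
                for (auto & [name, path] : bfd.outputs)
                    std::cout << path << '\n';
            },
        }, b);
}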
diff --git a/src/nix/bundle.md b/src/nix/bundle.md index 2bb70711f..a18161a3c 100644 --- a/src/nix/bundle.md +++ b/src/nix/bundle.md @@ -44,7 +44,7 @@ flake output attributes: * `bundlers.<system>.default` -If an attribute *name* is given, `nix run` tries the following flake +If an attribute *name* is given, `nix bundle` tries the following flake output attributes: * `bundlers.<system>.<name>` diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 92e31599a..4de109754 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -18,6 +18,9 @@ struct DevelopSettings : Config Setting<std::string> bashPrompt{this, "", "bash-prompt", "The bash prompt (`PS1`) in `nix develop` shells."}; + Setting<std::string> bashPromptPrefix{this, "", "bash-prompt-prefix", + "Prefix prepended to the `PS1` environment variable in `nix develop` shells."}; + Setting<std::string> bashPromptSuffix{this, "", "bash-prompt-suffix", "Suffix appended to the `PS1` environment variable in `nix develop` shells."}; }; @@ -196,21 +199,22 @@ static StorePath getDerivationEnvironment(ref<Store> store, ref<Store> evalStore drv.inputSrcs.insert(std::move(getEnvShPath)); if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) { for (auto & output : drv.outputs) { - output.second = { - .output = DerivationOutputDeferred{}, - }; + output.second = DerivationOutput::Deferred {}, drv.env[output.first] = hashPlaceholder(output.first); } } else { for (auto & output : drv.outputs) { - output.second = { .output = DerivationOutputInputAddressed { .path = StorePath::dummy } }; + output.second = DerivationOutput::Deferred { }; drv.env[output.first] = ""; } - Hash h = std::get<0>(hashDerivationModulo(*evalStore, drv, true)); + auto hashesModulo = hashDerivationModulo(*evalStore, drv, true); for (auto & output : drv.outputs) { + Hash h = hashesModulo.hashes.at(output.first); auto outPath = store->makeOutputPath(output.first, h, drv.name); - output.second = { .output = DerivationOutputInputAddressed { .path = outPath } }; + output.second = DerivationOutput::InputAddressed { + .path = outPath, + }; drv.env[output.first] = store->printStorePath(outPath); } } @@ -242,6 +246,7 @@ struct Common : InstallableCommand, MixProfile "NIX_LOG_FD", "NIX_REMOTE", "PPID", + "SHELL", "SHELLOPTS", "SSL_CERT_FILE", // FIXME: only want to ignore /no-cert-file.crt "TEMP", @@ -272,15 +277,27 @@ struct Common : InstallableCommand, MixProfile const BuildEnvironment & buildEnvironment, const Path & outputsDir = absPath(".") + "/outputs") { + // A list of colon-separated environment variables that should be + // prepended to, rather than overwritten, in order to keep the shell usable. + // Please keep this list minimal in order to avoid impurities. 
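// For the variables listed in the array just below, the loops later in this
// function emit a save/restore pair around the sourced build environment, so
// the user's value (e.g. PATH) is prepended to rather than overwritten. A
// standalone sketch of the script fragment being generated — shape only, not
// the full prologue that `nix develop` actually writes:

#include <iostream>
#include <sstream>

int main()
{
    static const char * const savedVars[] = {"PATH", "XDG_DATA_DIRS"};

    std::ostringstream out;
    for (auto var : savedVars) {
        out << var << "=${" << var << ":-}\n";                 // stay safe under `set -u` if unset
        out << "nix_saved_" << var << "=\"$" << var << "\"\n"; // remember the user's value
    }
    out << ": '...build environment is sourced here...'\n";
    for (auto var : savedVars)                                  // prepend, don't replace
        out << var << "=\"$" << var << ":$nix_saved_" << var << "\"\n";

    std::cout << out.str();
}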
+ static const char * const savedVars[] = { + "PATH", // for commands + "XDG_DATA_DIRS", // for loadable completion + }; + std::ostringstream out; out << "unset shellHook\n"; - out << "nix_saved_PATH=\"$PATH\"\n"; + for (auto & var : savedVars) { + out << fmt("%s=${%s:-}\n", var, var); + out << fmt("nix_saved_%s=\"$%s\"\n", var, var); + } buildEnvironment.toBash(out, ignoreVars); - out << "PATH=\"$PATH:$nix_saved_PATH\"\n"; + for (auto & var : savedVars) + out << fmt("%s=\"$%s:$nix_saved_%s\"\n", var, var, var); out << "export NIX_BUILD_TOP=\"$(mktemp -d -t nix-shell.XXXXXX)\"\n"; for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"}) @@ -307,7 +324,7 @@ struct Common : InstallableCommand, MixProfile for (auto & [installable_, dir_] : redirects) { auto dir = absPath(dir_); auto installable = parseInstallable(store, installable_); - auto builtPaths = toStorePaths( + auto builtPaths = Installable::toStorePaths( getEvalStore(), store, Realise::Nothing, OperateOn::Output, {installable}); for (auto & path: builtPaths) { auto from = store->printStorePath(path); @@ -347,7 +364,7 @@ struct Common : InstallableCommand, MixProfile if (path && hasSuffix(path->to_string(), "-env")) return *path; else { - auto drvs = toDerivations(store, {installable}); + auto drvs = Installable::toDerivations(store, {installable}); if (drvs.size() != 1) throw Error("'%s' needs to evaluate to a single derivation, but it evaluated to %d derivations", @@ -481,6 +498,9 @@ struct CmdDevelop : Common, MixEnvironment if (developSettings.bashPrompt != "") script += fmt("[ -n \"$PS1\" ] && PS1=%s;\n", shellEscape(developSettings.bashPrompt.get())); + if (developSettings.bashPromptPrefix != "") + script += fmt("[ -n \"$PS1\" ] && PS1=%s\"$PS1\";\n", + shellEscape(developSettings.bashPromptPrefix.get())); if (developSettings.bashPromptSuffix != "") script += fmt("[ -n \"$PS1\" ] && PS1+=%s;\n", shellEscape(developSettings.bashPromptSuffix.get())); @@ -506,12 +526,25 @@ struct CmdDevelop : Common, MixEnvironment state, installable->nixpkgsFlakeRef(), "bashInteractive", + DefaultOutputs(), Strings{}, Strings{"legacyPackages." + settings.thisSystem.get() + "."}, nixpkgsLockFlags); - shell = store->printStorePath( - toStorePath(getEvalStore(), store, Realise::Outputs, OperateOn::Output, bashInstallable)) + "/bin/bash"; + bool found = false; + + for (auto & path : Installable::toStorePaths(getEvalStore(), store, Realise::Outputs, OperateOn::Output, {bashInstallable})) { + auto s = store->printStorePath(path) + "/bin/bash"; + if (pathExists(s)) { + shell = s; + found = true; + break; + } + } + + if (!found) + throw Error("package 'nixpkgs#bashInteractive' does not provide a 'bin/bash'"); + } catch (Error &) { ignoreException(); } diff --git a/src/nix/develop.md b/src/nix/develop.md index 8bcff66c9..4e8542d1b 100644 --- a/src/nix/develop.md +++ b/src/nix/develop.md @@ -66,6 +66,12 @@ R""( `nixpkgs#glibc` in `~/my-glibc` and want to compile another package against it. +* Run a series of script commands: + + ```console + # nix develop --command bash -c "mkdir build && cmake .. && make" + ``` + # Description `nix develop` starts a `bash` shell that provides an interactive build @@ -80,8 +86,8 @@ initialised by `stdenv` and exits. This build environment can be recorded into a profile using `--profile`. The prompt used by the `bash` shell can be customised by setting the -`bash-prompt` and `bash-prompt-suffix` settings in `nix.conf` or in -the flake's `nixConfig` attribute. 
+`bash-prompt`, `bash-prompt-prefix`, and `bash-prompt-suffix` settings in +`nix.conf` or in the flake's `nixConfig` attribute. # Flake output attributes diff --git a/src/nix/diff-closures.cc b/src/nix/diff-closures.cc index 734c41e0e..0621d662c 100644 --- a/src/nix/diff-closures.cc +++ b/src/nix/diff-closures.cc @@ -131,9 +131,9 @@ struct CmdDiffClosures : SourceExprCommand void run(ref<Store> store) override { auto before = parseInstallable(store, _before); - auto beforePath = toStorePath(getEvalStore(), store, Realise::Outputs, operateOn, before); + auto beforePath = Installable::toStorePath(getEvalStore(), store, Realise::Outputs, operateOn, before); auto after = parseInstallable(store, _after); - auto afterPath = toStorePath(getEvalStore(), store, Realise::Outputs, operateOn, after); + auto afterPath = Installable::toStorePath(getEvalStore(), store, Realise::Outputs, operateOn, after); printClosureDiff(store, beforePath, afterPath, ""); } }; diff --git a/src/nix/doctor.cc b/src/nix/doctor.cc index 4f3003448..ea87e3d87 100644 --- a/src/nix/doctor.cc +++ b/src/nix/doctor.cc @@ -24,12 +24,12 @@ std::string formatProtocol(unsigned int proto) } bool checkPass(const std::string & msg) { - logger->log(ANSI_GREEN "[PASS] " ANSI_NORMAL + msg); + notice(ANSI_GREEN "[PASS] " ANSI_NORMAL + msg); return true; } bool checkFail(const std::string & msg) { - logger->log(ANSI_RED "[FAIL] " ANSI_NORMAL + msg); + notice(ANSI_RED "[FAIL] " ANSI_NORMAL + msg); return false; } diff --git a/src/nix/edit.cc b/src/nix/edit.cc index fc48db0d7..76a134b1f 100644 --- a/src/nix/edit.cc +++ b/src/nix/edit.cc @@ -28,19 +28,19 @@ struct CmdEdit : InstallableCommand { auto state = getEvalState(); - auto [v, pos] = installable->toValue(*state); + const auto [file, line] = [&] { + auto [v, pos] = installable->toValue(*state); - try { - pos = findPackageFilename(*state, *v, installable->what()); - } catch (NoPositionInfo &) { - } - - if (pos == noPos) - throw Error("cannot find position information for '%s", installable->what()); + try { + return findPackageFilename(*state, *v, installable->what()); + } catch (NoPositionInfo &) { + throw Error("cannot find position information for '%s", installable->what()); + } + }(); stopProgressBar(); - auto args = editorFor(pos); + auto args = editorFor(file, line); restoreProcessContext(); diff --git a/src/nix/edit.md b/src/nix/edit.md index 80563d06b..89bd09abf 100644 --- a/src/nix/edit.md +++ b/src/nix/edit.md @@ -24,8 +24,8 @@ this attribute to the location of the definition of the `meta.description`, `version` or `name` derivation attributes. The editor to invoke is specified by the `EDITOR` environment -variable. It defaults to `cat`. If the editor is `emacs`, `nano` or -`vim`, it is passed the line number of the derivation using the -argument `+<lineno>`. +variable. It defaults to `cat`. If the editor is `emacs`, `nano`, +`vim` or `kak`, it is passed the line number of the derivation using +the argument `+<lineno>`. 
)"" diff --git a/src/nix/eval.cc b/src/nix/eval.cc index 8cd04d5fe..ddd2790c6 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -16,7 +16,7 @@ struct CmdEval : MixJSON, InstallableCommand std::optional<std::string> apply; std::optional<Path> writeTo; - CmdEval() + CmdEval() : InstallableCommand(true /* supportReadOnlyMode */) { addFlag({ .longName = "raw", @@ -77,9 +77,9 @@ struct CmdEval : MixJSON, InstallableCommand if (pathExists(*writeTo)) throw Error("path '%s' already exists", *writeTo); - std::function<void(Value & v, const Pos & pos, const Path & path)> recurse; + std::function<void(Value & v, const PosIdx pos, const Path & path)> recurse; - recurse = [&](Value & v, const Pos & pos, const Path & path) + recurse = [&](Value & v, const PosIdx pos, const Path & path) { state->forceValue(v, pos); if (v.type() == nString) @@ -88,18 +88,22 @@ struct CmdEval : MixJSON, InstallableCommand else if (v.type() == nAttrs) { if (mkdir(path.c_str(), 0777) == -1) throw SysError("creating directory '%s'", path); - for (auto & attr : *v.attrs) + for (auto & attr : *v.attrs) { + std::string_view name = state->symbols[attr.name]; try { - if (attr.name == "." || attr.name == "..") - throw Error("invalid file name '%s'", attr.name); - recurse(*attr.value, *attr.pos, path + "/" + std::string(attr.name)); + if (name == "." || name == "..") + throw Error("invalid file name '%s'", name); + recurse(*attr.value, attr.pos, concatStrings(path, "/", name)); } catch (Error & e) { - e.addTrace(*attr.pos, hintfmt("while evaluating the attribute '%s'", attr.name)); + e.addTrace( + state->positions[attr.pos], + hintfmt("while evaluating the attribute '%s'", name)); throw; } + } } else - throw TypeError("value at '%s' is not a string or an attribute set", pos); + throw TypeError("value at '%s' is not a string or an attribute set", state->positions[pos]); }; recurse(*v, pos, *writeTo); @@ -112,12 +116,13 @@ struct CmdEval : MixJSON, InstallableCommand else if (json) { JSONPlaceholder jsonOut(std::cout); - printValueAsJSON(*state, true, *v, pos, jsonOut, context); + printValueAsJSON(*state, true, *v, pos, jsonOut, context, false); + std::cout << std::endl; } else { state->forceValueDeep(*v); - logger->cout("%s", *v); + logger->cout("%s", printValue(*state, *v)); } } }; diff --git a/src/nix/flake-update.md b/src/nix/flake-update.md index 03b50e38e..2ee8a707d 100644 --- a/src/nix/flake-update.md +++ b/src/nix/flake-update.md @@ -6,7 +6,7 @@ R""( lock file: ```console - # nix flake update + # nix flake update --commit-lock-file * Updated 'nix': 'github:NixOS/nix/9fab14adbc3810d5cc1f88672fde1eee4358405c' -> 'github:NixOS/nix/8927cba62f5afb33b01016d5c4f7f8b7d0adde3c' * Updated 'nixpkgs': 'github:NixOS/nixpkgs/3d2d8f281a27d466fa54b469b5993f7dde198375' -> 'github:NixOS/nixpkgs/a3a3dda3bacf61e8a39258a0ed9c924eeca8e293' … diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 14a235501..3967f1102 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -50,9 +50,9 @@ public: return flake::lockFlake(*getEvalState(), getFlakeRef(), lockFlags); } - std::optional<FlakeRef> getFlakeRefForCompletion() override + std::vector<std::string> getFlakesForCompletion() override { - return getFlakeRef(); + return {flakeUrl}; } }; @@ -123,7 +123,7 @@ struct CmdFlakeLock : FlakeCommand }; static void enumerateOutputs(EvalState & state, Value & vFlake, - std::function<void(const std::string & name, Value & vProvide, const Pos & pos)> callback) + std::function<void(const std::string & name, Value & vProvide, const PosIdx pos)> callback) { auto 
pos = vFlake.determinePos(noPos); state.forceAttrs(vFlake, pos); @@ -139,11 +139,11 @@ static void enumerateOutputs(EvalState & state, Value & vFlake, else. This way we can disable IFD for hydraJobs and then enable it for other outputs. */ if (auto attr = aOutputs->value->attrs->get(sHydraJobs)) - callback(attr->name, *attr->value, *attr->pos); + callback(state.symbols[attr->name], *attr->value, attr->pos); for (auto & attr : *aOutputs->value->attrs) { if (attr.name != sHydraJobs) - callback(attr.name, *attr.value, *attr.pos); + callback(state.symbols[attr.name], *attr.value, attr.pos); } } @@ -212,7 +212,8 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON ANSI_BOLD "Last modified:" ANSI_NORMAL " %s", std::put_time(std::localtime(&*lastModified), "%F %T")); - logger->cout(ANSI_BOLD "Inputs:" ANSI_NORMAL); + if (!lockedFlake.lockFile.root->inputs.empty()) + logger->cout(ANSI_BOLD "Inputs:" ANSI_NORMAL); std::unordered_set<std::shared_ptr<Node>> visited; @@ -254,14 +255,6 @@ struct CmdFlakeInfo : CmdFlakeMetadata } }; -static bool argHasName(std::string_view arg, std::string_view expected) -{ - return - arg == expected - || arg == "_" - || (hasPrefix(arg, "_") && arg.substr(1) == expected); -} - struct CmdFlakeCheck : FlakeCommand { bool build = true; @@ -315,21 +308,33 @@ struct CmdFlakeCheck : FlakeCommand // FIXME: rewrite to use EvalCache. - auto checkSystemName = [&](const std::string & system, const Pos & pos) { + auto resolve = [&] (PosIdx p) { + return state->positions[p]; + }; + + auto argHasName = [&] (Symbol arg, std::string_view expected) { + std::string_view name = state->symbols[arg]; + return + name == expected + || name == "_" + || (hasPrefix(name, "_") && name.substr(1) == expected); + }; + + auto checkSystemName = [&](const std::string & system, const PosIdx pos) { // FIXME: what's the format of "system"? 
if (system.find('-') == std::string::npos) - reportError(Error("'%s' is not a valid system type, at %s", system, pos)); + reportError(Error("'%s' is not a valid system type, at %s", system, resolve(pos))); }; - auto checkDerivation = [&](const std::string & attrPath, Value & v, const Pos & pos) -> std::optional<StorePath> { + auto checkDerivation = [&](const std::string & attrPath, Value & v, const PosIdx pos) -> std::optional<StorePath> { try { auto drvInfo = getDerivation(*state, v, false); if (!drvInfo) throw Error("flake attribute '%s' is not a derivation", attrPath); // FIXME: check meta attributes - return std::make_optional(store->parseStorePath(drvInfo->queryDrvPath())); + return drvInfo->queryDrvPath(); } catch (Error & e) { - e.addTrace(pos, hintfmt("while checking the derivation '%s'", attrPath)); + e.addTrace(resolve(pos), hintfmt("while checking the derivation '%s'", attrPath)); reportError(e); } return std::nullopt; @@ -337,7 +342,7 @@ struct CmdFlakeCheck : FlakeCommand std::vector<DerivedPath> drvPaths; - auto checkApp = [&](const std::string & attrPath, Value & v, const Pos & pos) { + auto checkApp = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { #if 0 // FIXME @@ -348,12 +353,12 @@ struct CmdFlakeCheck : FlakeCommand } #endif } catch (Error & e) { - e.addTrace(pos, hintfmt("while checking the app definition '%s'", attrPath)); + e.addTrace(resolve(pos), hintfmt("while checking the app definition '%s'", attrPath)); reportError(e); } }; - auto checkOverlay = [&](const std::string & attrPath, Value & v, const Pos & pos) { + auto checkOverlay = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { state->forceValue(v, pos); if (!v.isLambda() @@ -368,12 +373,12 @@ struct CmdFlakeCheck : FlakeCommand // FIXME: if we have a 'nixpkgs' input, use it to // evaluate the overlay. } catch (Error & e) { - e.addTrace(pos, hintfmt("while checking the overlay '%s'", attrPath)); + e.addTrace(resolve(pos), hintfmt("while checking the overlay '%s'", attrPath)); reportError(e); } }; - auto checkModule = [&](const std::string & attrPath, Value & v, const Pos & pos) { + auto checkModule = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { state->forceValue(v, pos); if (v.isLambda()) { @@ -382,9 +387,11 @@ struct CmdFlakeCheck : FlakeCommand } else if (v.type() == nAttrs) { for (auto & attr : *v.attrs) try { - state->forceValue(*attr.value, *attr.pos); + state->forceValue(*attr.value, attr.pos); } catch (Error & e) { - e.addTrace(*attr.pos, hintfmt("while evaluating the option '%s'", attr.name)); + e.addTrace( + state->positions[attr.pos], + hintfmt("while evaluating the option '%s'", state->symbols[attr.name])); throw; } } else @@ -392,14 +399,14 @@ struct CmdFlakeCheck : FlakeCommand // FIXME: if we have a 'nixpkgs' input, use it to // check the module. 
} catch (Error & e) { - e.addTrace(pos, hintfmt("while checking the NixOS module '%s'", attrPath)); + e.addTrace(resolve(pos), hintfmt("while checking the NixOS module '%s'", attrPath)); reportError(e); } }; - std::function<void(const std::string & attrPath, Value & v, const Pos & pos)> checkHydraJobs; + std::function<void(const std::string & attrPath, Value & v, const PosIdx pos)> checkHydraJobs; - checkHydraJobs = [&](const std::string & attrPath, Value & v, const Pos & pos) { + checkHydraJobs = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { state->forceAttrs(v, pos); @@ -407,23 +414,23 @@ struct CmdFlakeCheck : FlakeCommand throw Error("jobset should not be a derivation at top-level"); for (auto & attr : *v.attrs) { - state->forceAttrs(*attr.value, *attr.pos); - auto attrPath2 = attrPath + "." + (std::string) attr.name; + state->forceAttrs(*attr.value, attr.pos); + auto attrPath2 = concatStrings(attrPath, ".", state->symbols[attr.name]); if (state->isDerivation(*attr.value)) { Activity act(*logger, lvlChatty, actUnknown, fmt("checking Hydra job '%s'", attrPath2)); - checkDerivation(attrPath2, *attr.value, *attr.pos); + checkDerivation(attrPath2, *attr.value, attr.pos); } else - checkHydraJobs(attrPath2, *attr.value, *attr.pos); + checkHydraJobs(attrPath2, *attr.value, attr.pos); } } catch (Error & e) { - e.addTrace(pos, hintfmt("while checking the Hydra jobset '%s'", attrPath)); + e.addTrace(resolve(pos), hintfmt("while checking the Hydra jobset '%s'", attrPath)); reportError(e); } }; - auto checkNixOSConfiguration = [&](const std::string & attrPath, Value & v, const Pos & pos) { + auto checkNixOSConfiguration = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { Activity act(*logger, lvlChatty, actUnknown, fmt("checking NixOS configuration '%s'", attrPath)); @@ -433,12 +440,12 @@ struct CmdFlakeCheck : FlakeCommand if (!state->isDerivation(*vToplevel)) throw Error("attribute 'config.system.build.toplevel' is not a derivation"); } catch (Error & e) { - e.addTrace(pos, hintfmt("while checking the NixOS configuration '%s'", attrPath)); + e.addTrace(resolve(pos), hintfmt("while checking the NixOS configuration '%s'", attrPath)); reportError(e); } }; - auto checkTemplate = [&](const std::string & attrPath, Value & v, const Pos & pos) { + auto checkTemplate = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { Activity act(*logger, lvlChatty, actUnknown, fmt("checking template '%s'", attrPath)); @@ -448,7 +455,7 @@ struct CmdFlakeCheck : FlakeCommand if (auto attr = v.attrs->get(state->symbols.create("path"))) { if (attr->name == state->symbols.create("path")) { PathSet context; - auto path = state->coerceToPath(*attr->pos, *attr->value, context); + auto path = state->coerceToPath(attr->pos, *attr->value, context); if (!store->isInStore(path)) throw Error("template '%s' has a bad 'path' attribute"); // TODO: recursively check the flake in 'path'. 
@@ -457,29 +464,29 @@ struct CmdFlakeCheck : FlakeCommand throw Error("template '%s' lacks attribute 'path'", attrPath); if (auto attr = v.attrs->get(state->symbols.create("description"))) - state->forceStringNoCtx(*attr->value, *attr->pos); + state->forceStringNoCtx(*attr->value, attr->pos); else throw Error("template '%s' lacks attribute 'description'", attrPath); for (auto & attr : *v.attrs) { - std::string name(attr.name); - if (name != "path" && name != "description") + std::string_view name(state->symbols[attr.name]); + if (name != "path" && name != "description" && name != "welcomeText") throw Error("template '%s' has unsupported attribute '%s'", attrPath, name); } } catch (Error & e) { - e.addTrace(pos, hintfmt("while checking the template '%s'", attrPath)); + e.addTrace(resolve(pos), hintfmt("while checking the template '%s'", attrPath)); reportError(e); } }; - auto checkBundler = [&](const std::string & attrPath, Value & v, const Pos & pos) { + auto checkBundler = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { state->forceValue(v, pos); if (!v.isLambda()) throw Error("bundler must be a function"); // TODO: check types of inputs/outputs? } catch (Error & e) { - e.addTrace(pos, hintfmt("while checking the template '%s'", attrPath)); + e.addTrace(resolve(pos), hintfmt("while checking the template '%s'", attrPath)); reportError(e); } }; @@ -492,7 +499,7 @@ struct CmdFlakeCheck : FlakeCommand enumerateOutputs(*state, *vFlake, - [&](const std::string & name, Value & vOutput, const Pos & pos) { + [&](const std::string & name, Value & vOutput, const PosIdx pos) { Activity act(*logger, lvlChatty, actUnknown, fmt("checking flake output '%s'", name)); @@ -503,11 +510,12 @@ struct CmdFlakeCheck : FlakeCommand std::string_view replacement = name == "defaultPackage" ? "packages.<system>.default" : - name == "defaultApps" ? "apps.<system>.default" : + name == "defaultApp" ? "apps.<system>.default" : name == "defaultTemplate" ? "templates.default" : name == "defaultBundler" ? "bundlers.<system>.default" : name == "overlay" ? "overlays.default" : name == "devShell" ? "devShells.<system>.default" : + name == "nixosModule" ? 
"nixosModules.default" : ""; if (replacement != "") warn("flake output attribute '%s' is deprecated; use '%s' instead", name, replacement); @@ -515,66 +523,82 @@ struct CmdFlakeCheck : FlakeCommand if (name == "checks") { state->forceAttrs(vOutput, pos); for (auto & attr : *vOutput.attrs) { - checkSystemName(attr.name, *attr.pos); - state->forceAttrs(*attr.value, *attr.pos); + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + state->forceAttrs(*attr.value, attr.pos); for (auto & attr2 : *attr.value->attrs) { auto drvPath = checkDerivation( - fmt("%s.%s.%s", name, attr.name, attr2.name), - *attr2.value, *attr2.pos); - if (drvPath && (std::string) attr.name == settings.thisSystem.get()) + fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), + *attr2.value, attr2.pos); + if (drvPath && attr_name == settings.thisSystem.get()) drvPaths.push_back(DerivedPath::Built{*drvPath}); } } } + else if (name == "formatter") { + state->forceAttrs(vOutput, pos); + for (auto & attr : *vOutput.attrs) { + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + checkApp( + fmt("%s.%s", name, attr_name), + *attr.value, attr.pos); + } + } + else if (name == "packages" || name == "devShells") { state->forceAttrs(vOutput, pos); for (auto & attr : *vOutput.attrs) { - checkSystemName(attr.name, *attr.pos); - state->forceAttrs(*attr.value, *attr.pos); + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + state->forceAttrs(*attr.value, attr.pos); for (auto & attr2 : *attr.value->attrs) checkDerivation( - fmt("%s.%s.%s", name, attr.name, attr2.name), - *attr2.value, *attr2.pos); + fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), + *attr2.value, attr2.pos); } } else if (name == "apps") { state->forceAttrs(vOutput, pos); for (auto & attr : *vOutput.attrs) { - checkSystemName(attr.name, *attr.pos); - state->forceAttrs(*attr.value, *attr.pos); + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + state->forceAttrs(*attr.value, attr.pos); for (auto & attr2 : *attr.value->attrs) checkApp( - fmt("%s.%s.%s", name, attr.name, attr2.name), - *attr2.value, *attr2.pos); + fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), + *attr2.value, attr2.pos); } } else if (name == "defaultPackage" || name == "devShell") { state->forceAttrs(vOutput, pos); for (auto & attr : *vOutput.attrs) { - checkSystemName(attr.name, *attr.pos); + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); checkDerivation( - fmt("%s.%s", name, attr.name), - *attr.value, *attr.pos); + fmt("%s.%s", name, attr_name), + *attr.value, attr.pos); } } else if (name == "defaultApp") { state->forceAttrs(vOutput, pos); for (auto & attr : *vOutput.attrs) { - checkSystemName(attr.name, *attr.pos); + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); checkApp( - fmt("%s.%s", name, attr.name), - *attr.value, *attr.pos); + fmt("%s.%s", name, attr_name), + *attr.value, attr.pos); } } else if (name == "legacyPackages") { state->forceAttrs(vOutput, pos); for (auto & attr : *vOutput.attrs) { - checkSystemName(attr.name, *attr.pos); + checkSystemName(state->symbols[attr.name], attr.pos); // FIXME: do getDerivations? 
} } @@ -585,8 +609,8 @@ struct CmdFlakeCheck : FlakeCommand else if (name == "overlays") { state->forceAttrs(vOutput, pos); for (auto & attr : *vOutput.attrs) - checkOverlay(fmt("%s.%s", name, attr.name), - *attr.value, *attr.pos); + checkOverlay(fmt("%s.%s", name, state->symbols[attr.name]), + *attr.value, attr.pos); } else if (name == "nixosModule") @@ -595,15 +619,15 @@ struct CmdFlakeCheck : FlakeCommand else if (name == "nixosModules") { state->forceAttrs(vOutput, pos); for (auto & attr : *vOutput.attrs) - checkModule(fmt("%s.%s", name, attr.name), - *attr.value, *attr.pos); + checkModule(fmt("%s.%s", name, state->symbols[attr.name]), + *attr.value, attr.pos); } else if (name == "nixosConfigurations") { state->forceAttrs(vOutput, pos); for (auto & attr : *vOutput.attrs) - checkNixOSConfiguration(fmt("%s.%s", name, attr.name), - *attr.value, *attr.pos); + checkNixOSConfiguration(fmt("%s.%s", name, state->symbols[attr.name]), + *attr.value, attr.pos); } else if (name == "hydraJobs") @@ -615,29 +639,31 @@ struct CmdFlakeCheck : FlakeCommand else if (name == "templates") { state->forceAttrs(vOutput, pos); for (auto & attr : *vOutput.attrs) - checkTemplate(fmt("%s.%s", name, attr.name), - *attr.value, *attr.pos); + checkTemplate(fmt("%s.%s", name, state->symbols[attr.name]), + *attr.value, attr.pos); } else if (name == "defaultBundler") { state->forceAttrs(vOutput, pos); for (auto & attr : *vOutput.attrs) { - checkSystemName(attr.name, *attr.pos); + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); checkBundler( - fmt("%s.%s", name, attr.name), - *attr.value, *attr.pos); + fmt("%s.%s", name, attr_name), + *attr.value, attr.pos); } } else if (name == "bundlers") { state->forceAttrs(vOutput, pos); for (auto & attr : *vOutput.attrs) { - checkSystemName(attr.name, *attr.pos); - state->forceAttrs(*attr.value, *attr.pos); + const auto & attr_name = state->symbols[attr.name]; + checkSystemName(attr_name, attr.pos); + state->forceAttrs(*attr.value, attr.pos); for (auto & attr2 : *attr.value->attrs) { checkBundler( - fmt("%s.%s.%s", name, attr.name, attr2.name), - *attr2.value, *attr2.pos); + fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), + *attr2.value, attr2.pos); } } } @@ -646,7 +672,7 @@ struct CmdFlakeCheck : FlakeCommand warn("unknown flake output '%s'", name); } catch (Error & e) { - e.addTrace(pos, hintfmt("while checking flake output '%s'", name)); + e.addTrace(resolve(pos), hintfmt("while checking flake output '%s'", name)); reportError(e); } }); @@ -699,18 +725,24 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand auto [templateFlakeRef, templateName] = parseFlakeRefWithFragment(templateUrl, absPath(".")); auto installable = InstallableFlake(nullptr, - evalState, std::move(templateFlakeRef), templateName, + evalState, std::move(templateFlakeRef), templateName, DefaultOutputs(), defaultTemplateAttrPaths, defaultTemplateAttrPathsPrefixes, lockFlags); - auto [cursor, attrPath] = installable.getCursor(*evalState); + auto cursor = installable.getCursor(*evalState); - auto templateDir = cursor->getAttr("path")->getString(); + auto templateDirAttr = cursor->getAttr("path"); + auto templateDir = templateDirAttr->getString(); - assert(store->isInStore(templateDir)); + if (!store->isInStore(templateDir)) + throw TypeError( + "'%s' was not found in the Nix store\n" + "If you've set '%s' to a string, try using a path instead.", + templateDir, templateDirAttr->getAttrPathStr()); - std::vector<Path> files; + std::vector<Path> changedFiles; 
+ std::vector<Path> conflictedFiles; std::function<void(const Path & from, const Path & to)> copyDir; copyDir = [&](const Path & from, const Path & to) @@ -727,31 +759,41 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand auto contents = readFile(from2); if (pathExists(to2)) { auto contents2 = readFile(to2); - if (contents != contents2) - throw Error("refusing to overwrite existing file '%s'", to2); + if (contents != contents2) { + printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2, from2); + conflictedFiles.push_back(to2); + } else { + notice("skipping identical file: %s", from2); + } + continue; } else writeFile(to2, contents); } else if (S_ISLNK(st.st_mode)) { auto target = readLink(from2); if (pathExists(to2)) { - if (readLink(to2) != target) - throw Error("refusing to overwrite existing symlink '%s'", to2); + if (readLink(to2) != target) { + printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2, from2); + conflictedFiles.push_back(to2); + } else { + notice("skipping identical file: %s", from2); + } + continue; } else createSymlink(target, to2); } else throw Error("file '%s' has unsupported type", from2); - files.push_back(to2); + changedFiles.push_back(to2); notice("wrote: %s", to2); } }; copyDir(templateDir, flakeDir); - if (pathExists(flakeDir + "/.git")) { + if (!changedFiles.empty() && pathExists(flakeDir + "/.git")) { Strings args = { "-C", flakeDir, "add", "--intent-to-add", "--force", "--" }; - for (auto & s : files) args.push_back(s); + for (auto & s : changedFiles) args.push_back(s); runProgram("git", true, args); } auto welcomeText = cursor->maybeGetAttr("welcomeText"); @@ -759,6 +801,9 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand notice("\n"); notice(renderMarkdownToTerminal(welcomeText->getString())); } + + if (!conflictedFiles.empty()) + throw Error("Encountered %d conflicts - see above", conflictedFiles.size()); } }; @@ -956,8 +1001,11 @@ struct CmdFlakeShow : FlakeCommand, MixJSON { auto j = nlohmann::json::object(); + auto attrPathS = state->symbols.resolve(attrPath); + Activity act(*logger, lvlInfo, actUnknown, - fmt("evaluating '%s'", concatStringsSep(".", attrPath))); + fmt("evaluating '%s'", concatStringsSep(".", attrPathS))); + try { auto recurse = [&]() { @@ -965,14 +1013,15 @@ struct CmdFlakeShow : FlakeCommand, MixJSON logger->cout("%s", headerPrefix); auto attrs = visitor.getAttrs(); for (const auto & [i, attr] : enumerate(attrs)) { + const auto & attrName = state->symbols[attr]; bool last = i + 1 == attrs.size(); - auto visitor2 = visitor.getAttr(attr); + auto visitor2 = visitor.getAttr(attrName); auto attrPath2(attrPath); attrPath2.push_back(attr); auto j2 = visit(*visitor2, attrPath2, - fmt(ANSI_GREEN "%s%s" ANSI_NORMAL ANSI_BOLD "%s" ANSI_NORMAL, nextPrefix, last ? treeLast : treeConn, attr), + fmt(ANSI_GREEN "%s%s" ANSI_NORMAL ANSI_BOLD "%s" ANSI_NORMAL, nextPrefix, last ? treeLast : treeConn, attrName), nextPrefix + (last ? 
treeNull : treeLine)); - if (json) j.emplace(attr, std::move(j2)); + if (json) j.emplace(attrName, std::move(j2)); } }; @@ -981,8 +1030,8 @@ struct CmdFlakeShow : FlakeCommand, MixJSON auto name = visitor.getAttr(state->sName)->getString(); if (json) { std::optional<std::string> description; - if (auto aMeta = visitor.maybeGetAttr("meta")) { - if (auto aDescription = aMeta->maybeGetAttr("description")) + if (auto aMeta = visitor.maybeGetAttr(state->sMeta)) { + if (auto aDescription = aMeta->maybeGetAttr(state->sDescription)) description = aDescription->getString(); } j.emplace("type", "derivation"); @@ -992,10 +1041,10 @@ struct CmdFlakeShow : FlakeCommand, MixJSON } else { logger->cout("%s: %s '%s'", headerPrefix, - attrPath.size() == 2 && attrPath[0] == "devShell" ? "development environment" : - attrPath.size() >= 2 && attrPath[0] == "devShells" ? "development environment" : - attrPath.size() == 3 && attrPath[0] == "checks" ? "derivation" : - attrPath.size() >= 1 && attrPath[0] == "hydraJobs" ? "derivation" : + attrPath.size() == 2 && attrPathS[0] == "devShell" ? "development environment" : + attrPath.size() >= 2 && attrPathS[0] == "devShells" ? "development environment" : + attrPath.size() == 3 && attrPathS[0] == "checks" ? "derivation" : + attrPath.size() >= 1 && attrPathS[0] == "hydraJobs" ? "derivation" : "package", name); } @@ -1003,26 +1052,27 @@ struct CmdFlakeShow : FlakeCommand, MixJSON if (attrPath.size() == 0 || (attrPath.size() == 1 && ( - attrPath[0] == "defaultPackage" - || attrPath[0] == "devShell" - || attrPath[0] == "nixosConfigurations" - || attrPath[0] == "nixosModules" - || attrPath[0] == "defaultApp" - || attrPath[0] == "templates" - || attrPath[0] == "overlays")) + attrPathS[0] == "defaultPackage" + || attrPathS[0] == "devShell" + || attrPathS[0] == "formatter" + || attrPathS[0] == "nixosConfigurations" + || attrPathS[0] == "nixosModules" + || attrPathS[0] == "defaultApp" + || attrPathS[0] == "templates" + || attrPathS[0] == "overlays")) || ((attrPath.size() == 1 || attrPath.size() == 2) - && (attrPath[0] == "checks" - || attrPath[0] == "packages" - || attrPath[0] == "devShells" - || attrPath[0] == "apps")) + && (attrPathS[0] == "checks" + || attrPathS[0] == "packages" + || attrPathS[0] == "devShells" + || attrPathS[0] == "apps")) ) { recurse(); } else if ( - (attrPath.size() == 2 && (attrPath[0] == "defaultPackage" || attrPath[0] == "devShell")) - || (attrPath.size() == 3 && (attrPath[0] == "checks" || attrPath[0] == "packages" || attrPath[0] == "devShells")) + (attrPath.size() == 2 && (attrPathS[0] == "defaultPackage" || attrPathS[0] == "devShell" || attrPathS[0] == "formatter")) + || (attrPath.size() == 3 && (attrPathS[0] == "checks" || attrPathS[0] == "packages" || attrPathS[0] == "devShells")) ) { if (visitor.isDerivation()) @@ -1031,19 +1081,23 @@ struct CmdFlakeShow : FlakeCommand, MixJSON throw Error("expected a derivation"); } - else if (attrPath.size() > 0 && attrPath[0] == "hydraJobs") { + else if (attrPath.size() > 0 && attrPathS[0] == "hydraJobs") { if (visitor.isDerivation()) showDerivation(); else recurse(); } - else if (attrPath.size() > 0 && attrPath[0] == "legacyPackages") { + else if (attrPath.size() > 0 && attrPathS[0] == "legacyPackages") { if (attrPath.size() == 1) recurse(); - else if (!showLegacy) - logger->warn(fmt("%s: " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--legacy' to show)", headerPrefix)); - else { + else if (!showLegacy){ + if (!json) + logger->cout(fmt("%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--legacy' to show)", 
headerPrefix)); + else { + logger->warn(fmt("%s omitted (use '--legacy' to show)", concatStringsSep(".", attrPathS))); + } + } else { if (visitor.isDerivation()) showDerivation(); else if (attrPath.size() <= 2) @@ -1053,8 +1107,8 @@ struct CmdFlakeShow : FlakeCommand, MixJSON } else if ( - (attrPath.size() == 2 && attrPath[0] == "defaultApp") || - (attrPath.size() == 3 && attrPath[0] == "apps")) + (attrPath.size() == 2 && attrPathS[0] == "defaultApp") || + (attrPath.size() == 3 && attrPathS[0] == "apps")) { auto aType = visitor.maybeGetAttr("type"); if (!aType || aType->getString() != "app") @@ -1067,8 +1121,8 @@ struct CmdFlakeShow : FlakeCommand, MixJSON } else if ( - (attrPath.size() == 1 && attrPath[0] == "defaultTemplate") || - (attrPath.size() == 2 && attrPath[0] == "templates")) + (attrPath.size() == 1 && attrPathS[0] == "defaultTemplate") || + (attrPath.size() == 2 && attrPathS[0] == "templates")) { auto description = visitor.getAttr("description")->getString(); if (json) { @@ -1081,11 +1135,11 @@ struct CmdFlakeShow : FlakeCommand, MixJSON else { auto [type, description] = - (attrPath.size() == 1 && attrPath[0] == "overlay") - || (attrPath.size() == 2 && attrPath[0] == "overlays") ? std::make_pair("nixpkgs-overlay", "Nixpkgs overlay") : - attrPath.size() == 2 && attrPath[0] == "nixosConfigurations" ? std::make_pair("nixos-configuration", "NixOS configuration") : - (attrPath.size() == 1 && attrPath[0] == "nixosModule") - || (attrPath.size() == 2 && attrPath[0] == "nixosModules") ? std::make_pair("nixos-module", "NixOS module") : + (attrPath.size() == 1 && attrPathS[0] == "overlay") + || (attrPath.size() == 2 && attrPathS[0] == "overlays") ? std::make_pair("nixpkgs-overlay", "Nixpkgs overlay") : + attrPath.size() == 2 && attrPathS[0] == "nixosConfigurations" ? std::make_pair("nixos-configuration", "NixOS configuration") : + (attrPath.size() == 1 && attrPathS[0] == "nixosModule") + || (attrPath.size() == 2 && attrPathS[0] == "nixosModules") ? std::make_pair("nixos-module", "NixOS module") : std::make_pair("unknown", "unknown"); if (json) { j.emplace("type", type); @@ -1094,7 +1148,7 @@ struct CmdFlakeShow : FlakeCommand, MixJSON } } } catch (EvalError & e) { - if (!(attrPath.size() > 0 && attrPath[0] == "legacyPackages")) + if (!(attrPath.size() > 0 && attrPathS[0] == "legacyPackages")) throw; } diff --git a/src/nix/flake.md b/src/nix/flake.md index d59915eeb..a1ab43281 100644 --- a/src/nix/flake.md +++ b/src/nix/flake.md @@ -153,7 +153,7 @@ Currently the `type` attribute can be one of the following: git(+http|+https|+ssh|+git|+file|):(//<server>)?<path>(\?<params>)? ``` - The `ref` attribute defaults to `master`. + The `ref` attribute defaults to resolving the `HEAD` reference. The `rev` attribute must denote a commit that exists in the branch or tag specified by the `ref` attribute, since Nix doesn't do a full @@ -161,6 +161,11 @@ Currently the `type` attribute can be one of the following: doesn't allow fetching a `rev` without a known `ref`). The default is the commit currently pointed to by `ref`. + When `git+file` is used without specifying `ref` or `rev`, files are + fetched directly from the local `path` as long as they have been added + to the Git repository. If there are uncommitted changes, the reference + is treated as dirty and a warning is printed. + For example, the following are valid Git flake references: * `git+https://example.org/my/repo` @@ -176,9 +181,17 @@ Currently the `type` attribute can be one of the following: * `tarball`: Tarballs. 
The location of the tarball is specified by the attribute `url`. - In URL form, the schema must be `http://`, `https://` or `file://` - URLs and the extension must be `.zip`, `.tar`, `.tar.gz`, `.tar.xz`, - `.tar.bz2` or `.tar.zst`. + In URL form, the schema must be `tarball+http://`, `tarball+https://` or `tarball+file://`. + If the extension corresponds to a known archive format (`.zip`, `.tar`, + `.tgz`, `.tar.gz`, `.tar.xz`, `.tar.bz2` or `.tar.zst`), then the `tarball+` + can be dropped. + +* `file`: Plain files or directory tarballs, either over http(s) or from the local + disk. + + In URL form, the schema must be `file+http://`, `file+https://` or `file+file://`. + If the extension doesn’t correspond to a known archive format (as defined by the + `tarball` fetcher), then the `file+` prefix can be dropped. * `github`: A more efficient way to fetch repositories from GitHub. The following attributes are required: @@ -326,9 +339,10 @@ The following attributes are supported in `flake.nix`: * `nixConfig`: a set of `nix.conf` options to be set when evaluating any part of a flake. In the interests of security, only a small set of - whitelisted options (currently `bash-prompt`, `bash-prompt-suffix`, - and `flake-registry`) are allowed to be set without confirmation so long as - `accept-flake-config` is not set in the global configuration. + whitelisted options (currently `bash-prompt`, `bash-prompt-prefix`, + `bash-prompt-suffix`, and `flake-registry`) are allowed to be set without + confirmation so long as `accept-flake-config` is not set in the global + configuration. ## Flake inputs diff --git a/src/nix/fmt.cc b/src/nix/fmt.cc new file mode 100644 index 000000000..6f6a4a632 --- /dev/null +++ b/src/nix/fmt.cc @@ -0,0 +1,54 @@ +#include "command.hh" +#include "run.hh" + +using namespace nix; + +struct CmdFmt : SourceExprCommand { + std::vector<std::string> args; + + CmdFmt() { expectArgs({.label = "args", .handler = {&args}}); } + + std::string description() override { + return "reformat your code in the standard style"; + } + + std::string doc() override { + return + #include "fmt.md" + ; + } + + Category category() override { return catSecondary; } + + Strings getDefaultFlakeAttrPaths() override { + return Strings{"formatter." 
+ settings.thisSystem.get()}; + } + + Strings getDefaultFlakeAttrPathPrefixes() override { return Strings{}; } + + void run(ref<Store> store) override + { + auto evalState = getEvalState(); + auto evalStore = getEvalStore(); + + auto installable = parseInstallable(store, "."); + auto app = installable->toApp(*evalState).resolve(evalStore, store); + + Strings programArgs{app.program}; + + // Propagate arguments from the CLI + if (args.empty()) { + // Format the current flake out of the box + programArgs.push_back("."); + } else { + // User wants more power, let them decide which paths to include/exclude + for (auto &i : args) { + programArgs.push_back(i); + } + } + + runProgramInStore(store, app.program, programArgs); + }; +}; + +static auto r2 = registerCommand<CmdFmt>("fmt"); diff --git a/src/nix/fmt.md b/src/nix/fmt.md new file mode 100644 index 000000000..1c78bb36f --- /dev/null +++ b/src/nix/fmt.md @@ -0,0 +1,53 @@ +R""( + +# Examples + +With [nixpkgs-fmt](https://github.com/nix-community/nixpkgs-fmt): + +```nix +# flake.nix +{ + outputs = { nixpkgs, self }: { + formatter.x86_64-linux = nixpkgs.legacyPackages.x86_64-linux.nixpkgs-fmt; + }; +} +``` + +- Format the current flake: `$ nix fmt` + +- Format a specific folder or file: `$ nix fmt ./folder ./file.nix` + +With [nixfmt](https://github.com/serokell/nixfmt): + +```nix +# flake.nix +{ + outputs = { nixpkgs, self }: { + formatter.x86_64-linux = nixpkgs.legacyPackages.x86_64-linux.nixfmt; + }; +} +``` + +- Format specific files: `$ nix fmt ./file1.nix ./file2.nix` + +With [Alejandra](https://github.com/kamadorueda/alejandra): + +```nix +# flake.nix +{ + outputs = { nixpkgs, self }: { + formatter.x86_64-linux = nixpkgs.legacyPackages.x86_64-linux.alejandra; + }; +} +``` + +- Format the current flake: `$ nix fmt` + +- Format a specific folder or file: `$ nix fmt ./folder ./file.nix` + +# Description + +`nix fmt` will rewrite all Nix files (\*.nix) to a canonical format +using the formatter specified in your flake. + +)"" diff --git a/src/nix/get-env.sh b/src/nix/get-env.sh index 42c806450..a7a8a01b9 100644 --- a/src/nix/get-env.sh +++ b/src/nix/get-env.sh @@ -43,6 +43,7 @@ __dumpEnv() { local __var_name="${BASH_REMATCH[2]}" if [[ $__var_name =~ ^BASH_ || \ + $__var_name =~ ^COMP_ || \ $__var_name = _ || \ $__var_name = DIRSTACK || \ $__var_name = EUID || \ @@ -54,7 +55,9 @@ __dumpEnv() { $__var_name = PWD || \ $__var_name = RANDOM || \ $__var_name = SHLVL || \ - $__var_name = SECONDS \ + $__var_name = SECONDS || \ + $__var_name = EPOCHREALTIME || \ + $__var_name = EPOCHSECONDS \ ]]; then continue; fi if [[ -z $__first ]]; then printf ',\n'; else __first=; fi diff --git a/src/nix/key-generate-secret.md b/src/nix/key-generate-secret.md index 4938f637c..609b1abcc 100644 --- a/src/nix/key-generate-secret.md +++ b/src/nix/key-generate-secret.md @@ -30,7 +30,7 @@ convert-secret-to-public` to get the corresponding public key for verifying signed store paths. The mandatory argument `--key-name` specifies a key name (such as -`cache.example.org-1). It is used to look up keys on the client when +`cache.example.org-1`). It is used to look up keys on the client when it verifies signatures. It can be anything, but it’s suggested to use the host name of your cache (e.g. 
`cache.example.org`) with a suffix denoting the number of the key (to be incremented every time you need diff --git a/src/nix/log.cc b/src/nix/log.cc index fd3c1d787..72d02ef11 100644 --- a/src/nix/log.cc +++ b/src/nix/log.cc @@ -2,6 +2,7 @@ #include "common-args.hh" #include "shared.hh" #include "store-api.hh" +#include "log-store.hh" #include "progress-bar.hh" using namespace nix; @@ -34,17 +35,24 @@ struct CmdLog : InstallableCommand RunPager pager; for (auto & sub : subs) { + auto * logSubP = dynamic_cast<LogStore *>(&*sub); + if (!logSubP) { + printInfo("Skipped '%s' which does not support retrieving build logs", sub->getUri()); + continue; + } + auto & logSub = *logSubP; + auto log = std::visit(overloaded { [&](const DerivedPath::Opaque & bo) { - return sub->getBuildLog(bo.path); + return logSub.getBuildLog(bo.path); }, [&](const DerivedPath::Built & bfd) { - return sub->getBuildLog(bfd.drvPath); + return logSub.getBuildLog(bfd.drvPath); }, }, b.raw()); if (!log) continue; stopProgressBar(); - printInfo("got build log for '%s' from '%s'", installable->what(), sub->getUri()); + printInfo("got build log for '%s' from '%s'", installable->what(), logSub.getUri()); std::cout << *log; return; } diff --git a/src/nix/main.cc b/src/nix/main.cc index b923f2535..2c6309c81 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -53,7 +53,6 @@ static bool haveInternet() } std::string programPath; -char * * savedArgv; struct HelpRequested { }; @@ -74,6 +73,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs addFlag({ .longName = "help", .description = "Show usage information.", + .category = miscCategory, .handler = {[&]() { throw HelpRequested(); }}, }); @@ -82,12 +82,13 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs .shortName = 'L', .description = "Print full build logs on standard error.", .category = loggingCategory, - .handler = {[&]() {setLogFormat(LogFormat::barWithLogs); }}, + .handler = {[&]() { logger->setPrintBuildLogs(true); }}, }); addFlag({ .longName = "version", .description = "Show version information.", + .category = miscCategory, .handler = {[&]() { showVersion = true; }}, }); @@ -95,12 +96,14 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs .longName = "offline", .aliases = {"no-net"}, // FIXME: remove .description = "Disable substituters and consider all previously downloaded files up-to-date.", + .category = miscCategory, .handler = {[&]() { useNet = false; }}, }); addFlag({ .longName = "refresh", .description = "Consider all previously downloaded files out-of-date.", + .category = miscCategory, .handler = {[&]() { refresh = true; }}, }); } @@ -117,7 +120,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs {"hash-path", {"hash", "path"}}, {"ls-nar", {"nar", "ls"}}, {"ls-store", {"store", "ls"}}, - {"make-content-addressable", {"store", "make-content-addressable"}}, + {"make-content-addressable", {"store", "make-content-addressed"}}, {"optimise-store", {"store", "optimise"}}, {"ping-store", {"store", "ping"}}, {"sign-paths", {"store", "sign"}}, @@ -187,7 +190,7 @@ static void showHelp(std::vector<std::string> subcommand, MultiCommand & topleve *vUtils); auto attrs = state.buildBindings(16); - attrs.alloc("command").mkString(toplevel.toJSON().dump()); + attrs.alloc("toplevel").mkString(toplevel.toJSON().dump()); auto vRes = state.allocValue(); state.callFunction(*vGenerateManpage, state.allocValue()->mkAttrs(attrs), *vRes, noPos); @@ -261,9 +264,16 @@ void mainWrapped(int argc, char * * argv) } #endif + Finally f([] { 
logger->stop(); }); + programPath = argv[0]; auto programName = std::string(baseNameOf(programPath)); + if (argc > 1 && std::string_view(argv[1]) == "__build-remote") { + programName = "build-remote"; + argv++; argc--; + } + { auto legacy = (*RegisterLegacyCommand::commands)[programName]; if (legacy) return legacy(argc, argv); @@ -279,8 +289,6 @@ void mainWrapped(int argc, char * * argv) verbosity = lvlInfo; } - Finally f([] { logger->stop(); }); - NixArgs args; if (argc == 2 && std::string(argv[1]) == "__dump-args") { @@ -289,6 +297,7 @@ void mainWrapped(int argc, char * * argv) } if (argc == 2 && std::string(argv[1]) == "__dump-builtins") { + settings.experimentalFeatures = {Xp::Flakes, Xp::FetchClosure}; evalSettings.pureEval = false; EvalState state({}, openStore("dummy://")); auto res = nlohmann::json::object(); @@ -301,7 +310,7 @@ void mainWrapped(int argc, char * * argv) b["arity"] = primOp->arity; b["args"] = primOp->args; b["doc"] = trim(stripIndentation(primOp->doc)); - res[(std::string) builtin.name] = std::move(b); + res[state.symbols[builtin.name]] = std::move(b); } std::cout << res.dump() << "\n"; return; @@ -319,7 +328,7 @@ void mainWrapped(int argc, char * * argv) std::cout << "attrs\n"; break; } for (auto & s : *completions) - std::cout << s.completion << "\t" << s.description << "\n"; + std::cout << s.completion << "\t" << trim(s.description) << "\n"; } }); @@ -341,7 +350,10 @@ void mainWrapped(int argc, char * * argv) if (!completions) throw; } - if (completions) return; + if (completions) { + args.completionHook(); + return; + } if (args.showVersion) { printVersion(programName); @@ -379,6 +391,9 @@ void mainWrapped(int argc, char * * argv) settings.ttlPositiveNarInfoCache = 0; } + if (args.command->second->forceImpureByDefault() && !evalSettings.pureEval.overridden) { + evalSettings.pureEval = false; + } args.command->second->prepare(); args.command->second->run(); } diff --git a/src/nix/make-content-addressable.cc b/src/nix/make-content-addressable.cc deleted file mode 100644 index 2e75a3b61..000000000 --- a/src/nix/make-content-addressable.cc +++ /dev/null @@ -1,102 +0,0 @@ -#include "command.hh" -#include "store-api.hh" -#include "references.hh" -#include "common-args.hh" -#include "json.hh" - -using namespace nix; - -struct CmdMakeContentAddressable : StorePathsCommand, MixJSON -{ - CmdMakeContentAddressable() - { - realiseMode = Realise::Outputs; - } - - std::string description() override - { - return "rewrite a path or closure to content-addressed form"; - } - - std::string doc() override - { - return - #include "make-content-addressable.md" - ; - } - - void run(ref<Store> store, StorePaths && storePaths) override - { - auto paths = store->topoSortPaths(StorePathSet(storePaths.begin(), storePaths.end())); - - std::reverse(paths.begin(), paths.end()); - - std::map<StorePath, StorePath> remappings; - - auto jsonRoot = json ? std::make_unique<JSONObject>(std::cout) : nullptr; - auto jsonRewrites = json ? std::make_unique<JSONObject>(jsonRoot->object("rewrites")) : nullptr; - - for (auto & path : paths) { - auto pathS = store->printStorePath(path); - auto oldInfo = store->queryPathInfo(path); - std::string oldHashPart(path.hashPart()); - - StringSink sink; - store->narFromPath(path, sink); - - StringMap rewrites; - - StorePathSet references; - bool hasSelfReference = false; - for (auto & ref : oldInfo->references) { - if (ref == path) - hasSelfReference = true; - else { - auto i = remappings.find(ref); - auto replacement = i != remappings.end() ? 
i->second : ref; - // FIXME: warn about unremapped paths? - if (replacement != ref) - rewrites.insert_or_assign(store->printStorePath(ref), store->printStorePath(replacement)); - references.insert(std::move(replacement)); - } - } - - sink.s = rewriteStrings(sink.s, rewrites); - - HashModuloSink hashModuloSink(htSHA256, oldHashPart); - hashModuloSink(sink.s); - - auto narHash = hashModuloSink.finish().first; - - ValidPathInfo info { - store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, path.name(), references, hasSelfReference), - narHash, - }; - info.references = std::move(references); - if (hasSelfReference) info.references.insert(info.path); - info.narSize = sink.s.size(); - info.ca = FixedOutputHash { - .method = FileIngestionMethod::Recursive, - .hash = info.narHash, - }; - - if (!json) - notice("rewrote '%s' to '%s'", pathS, store->printStorePath(info.path)); - - auto source = sinkToSource([&](Sink & nextSink) { - RewritingSink rsink2(oldHashPart, std::string(info.path.hashPart()), nextSink); - rsink2(sink.s); - rsink2.flush(); - }); - - store->addToStore(info, *source); - - if (json) - jsonRewrites->attr(store->printStorePath(path), store->printStorePath(info.path)); - - remappings.insert_or_assign(std::move(path), std::move(info.path)); - } - } -}; - -static auto rCmdMakeContentAddressable = registerCommand2<CmdMakeContentAddressable>({"store", "make-content-addressable"}); diff --git a/src/nix/make-content-addressed.cc b/src/nix/make-content-addressed.cc new file mode 100644 index 000000000..34860c38f --- /dev/null +++ b/src/nix/make-content-addressed.cc @@ -0,0 +1,55 @@ +#include "command.hh" +#include "store-api.hh" +#include "make-content-addressed.hh" +#include "common-args.hh" +#include "json.hh" + +using namespace nix; + +struct CmdMakeContentAddressed : virtual CopyCommand, virtual StorePathsCommand, MixJSON +{ + CmdMakeContentAddressed() + { + realiseMode = Realise::Outputs; + } + + std::string description() override + { + return "rewrite a path or closure to content-addressed form"; + } + + std::string doc() override + { + return + #include "make-content-addressed.md" + ; + } + + void run(ref<Store> srcStore, StorePaths && storePaths) override + { + auto dstStore = dstUri.empty() ? 
openStore() : openStore(dstUri); + + auto remappings = makeContentAddressed(*srcStore, *dstStore, + StorePathSet(storePaths.begin(), storePaths.end())); + + if (json) { + JSONObject jsonRoot(std::cout); + JSONObject jsonRewrites(jsonRoot.object("rewrites")); + for (auto & path : storePaths) { + auto i = remappings.find(path); + assert(i != remappings.end()); + jsonRewrites.attr(srcStore->printStorePath(path), srcStore->printStorePath(i->second)); + } + } else { + for (auto & path : storePaths) { + auto i = remappings.find(path); + assert(i != remappings.end()); + notice("rewrote '%s' to '%s'", + srcStore->printStorePath(path), + srcStore->printStorePath(i->second)); + } + } + } +}; + +static auto rCmdMakeContentAddressed = registerCommand2<CmdMakeContentAddressed>({"store", "make-content-addressed"}); diff --git a/src/nix/make-content-addressable.md b/src/nix/make-content-addressed.md index 3dd847edc..32eecc880 100644 --- a/src/nix/make-content-addressable.md +++ b/src/nix/make-content-addressed.md @@ -5,7 +5,7 @@ R""( * Create a content-addressed representation of the closure of GNU Hello: ```console - # nix store make-content-addressable -r nixpkgs#hello + # nix store make-content-addressed nixpkgs#hello … rewrote '/nix/store/v5sv61sszx301i0x6xysaqzla09nksnd-hello-2.10' to '/nix/store/5skmmcb9svys5lj3kbsrjg7vf2irid63-hello-2.10' ``` @@ -22,14 +22,14 @@ R""( ```console # nix copy --to /tmp/nix --trusted-public-keys '' nixpkgs#hello - cannot add path '/nix/store/zy9wbxwcygrwnh8n2w9qbbcr6zk87m26-libunistring-0.9.10' because it lacks a valid signature + cannot add path '/nix/store/zy9wbxwcygrwnh8n2w9qbbcr6zk87m26-libunistring-0.9.10' because it lacks a signature by a trusted key ``` * Create a content-addressed representation of the current NixOS system closure: ```console - # nix store make-content-addressable -r /run/current-system + # nix store make-content-addressed /run/current-system ``` # Description diff --git a/src/nix/nix.md b/src/nix/nix.md index 0dacadee6..d48682a94 100644 --- a/src/nix/nix.md +++ b/src/nix/nix.md @@ -146,6 +146,51 @@ For most commands, if no installable is specified, the default is `.`, i.e. Nix will operate on the default flake output attribute of the flake in the current directory. +## Derivation output selection + +Derivations can have multiple outputs, each corresponding to a +different store path. For instance, a package can have a `bin` output +that contains programs, and a `dev` output that provides development +artifacts like C/C++ header files. The outputs on which `nix` commands +operate are determined as follows: + +* You can explicitly specify the desired outputs using the syntax + *installable*`^`*output1*`,`*...*`,`*outputN*. For example, you can + obtain the `dev` and `static` outputs of the `glibc` package: + + ```console + # nix build 'nixpkgs#glibc^dev,static' + # ls ./result-dev/include/ ./result-static/lib/ + … + ``` + +* You can also specify that *all* outputs should be used using the + syntax *installable*`^*`. 
For example, the following shows the size + of all outputs of the `glibc` package in the binary cache: + + ```console + # nix path-info -S --eval-store auto --store https://cache.nixos.org 'nixpkgs#glibc^*' + /nix/store/g02b1lpbddhymmcjb923kf0l7s9nww58-glibc-2.33-123 33208200 + /nix/store/851dp95qqiisjifi639r0zzg5l465ny4-glibc-2.33-123-bin 36142896 + /nix/store/kdgs3q6r7xdff1p7a9hnjr43xw2404z7-glibc-2.33-123-debug 155787312 + /nix/store/n4xa8h6pbmqmwnq0mmsz08l38abb06zc-glibc-2.33-123-static 42488328 + /nix/store/q6580lr01jpcsqs4r5arlh4ki2c1m9rv-glibc-2.33-123-dev 44200560 + ``` + +* If you didn't specify the desired outputs, but the derivation has an + attribute `meta.outputsToInstall`, Nix will use those outputs. For + example, since the package `nixpkgs#libxml2` has this attribute: + + ```console + # nix eval 'nixpkgs#libxml2.meta.outputsToInstall' + [ "bin" "man" ] + ``` + + a command like `nix shell nixpkgs#libxml2` will provide only those + two outputs by default. + +* Otherwise, Nix will use all outputs of the derivation. + # Nix stores Most `nix` subcommands operate on a *Nix store*. diff --git a/src/nix/path-from-hash-part.cc b/src/nix/path-from-hash-part.cc new file mode 100644 index 000000000..7f7cda8d3 --- /dev/null +++ b/src/nix/path-from-hash-part.cc @@ -0,0 +1,39 @@ +#include "command.hh" +#include "store-api.hh" + +using namespace nix; + +struct CmdPathFromHashPart : StoreCommand +{ + std::string hashPart; + + CmdPathFromHashPart() + { + expectArgs({ + .label = "hash-part", + .handler = {&hashPart}, + }); + } + + std::string description() override + { + return "get a store path from its hash part"; + } + + std::string doc() override + { + return + #include "path-from-hash-part.md" + ; + } + + void run(ref<Store> store) override + { + if (auto storePath = store->queryPathFromHashPart(hashPart)) + logger->cout(store->printStorePath(*storePath)); + else + throw Error("there is no store path corresponding to '%s'", hashPart); + } +}; + +static auto rCmdPathFromHashPart = registerCommand2<CmdPathFromHashPart>({"store", "path-from-hash-part"}); diff --git a/src/nix/path-from-hash-part.md b/src/nix/path-from-hash-part.md new file mode 100644 index 000000000..788e13ab6 --- /dev/null +++ b/src/nix/path-from-hash-part.md @@ -0,0 +1,20 @@ +R""( + +# Examples + +* Return the full store path with the given hash part: + + ```console + # nix store path-from-hash-part --store https://cache.nixos.org/ 0i2jd68mp5g6h2sa5k9c85rb80sn8hi9 + /nix/store/0i2jd68mp5g6h2sa5k9c85rb80sn8hi9-hello-2.10 + ``` + +# Description + +Given the hash part of a store path (that is, the 32 characters +following `/nix/store/`), return the full store path. This is +primarily useful in the implementation of binary caches, where a +request for a `.narinfo` file only supplies the hash part +(e.g. `https://cache.nixos.org/0i2jd68mp5g6h2sa5k9c85rb80sn8hi9.narinfo`). + +)"" diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index f2dd44ba4..ce3288dc1 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -199,11 +199,13 @@ static int main_nix_prefetch_url(int argc, char * * argv) state->forceAttrs(v, noPos); /* Extract the URL. 
*/ - auto & attr = v.attrs->need(state->symbols.create("urls")); - state->forceList(*attr.value, noPos); - if (attr.value->listSize() < 1) + auto * attr = v.attrs->get(state->symbols.create("urls")); + if (!attr) + throw Error("attribute 'urls' missing"); + state->forceList(*attr->value, noPos); + if (attr->value->listSize() < 1) throw Error("'urls' list is empty"); - url = state->forceString(*attr.value->listElems()[0]); + url = state->forceString(*attr->value->listElems()[0]); /* Extract the hash mode. */ auto attr2 = v.attrs->get(state->symbols.create("outputHashMode")); diff --git a/src/nix/profile-install.md b/src/nix/profile-install.md index e3009491e..aed414963 100644 --- a/src/nix/profile-install.md +++ b/src/nix/profile-install.md @@ -20,6 +20,13 @@ R""( # nix profile install nixpkgs/d73407e8e6002646acfdef0e39ace088bacc83da#hello ``` +* Install a specific output of a package: + + ```console + # nix profile install nixpkgs#bash^man + ``` + + # Description This command adds *installables* to a Nix profile. diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 0e8dc4380..3814e7d5a 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -22,13 +22,13 @@ struct ProfileElementSource // FIXME: record original attrpath. FlakeRef resolvedRef; std::string attrPath; - // FIXME: output names + OutputsSpec outputs; bool operator < (const ProfileElementSource & other) const { return - std::pair(originalRef.to_string(), attrPath) < - std::pair(other.originalRef.to_string(), other.attrPath); + std::tuple(originalRef.to_string(), attrPath, outputs) < + std::tuple(other.originalRef.to_string(), other.attrPath, other.outputs); } }; @@ -37,12 +37,12 @@ struct ProfileElement StorePathSet storePaths; std::optional<ProfileElementSource> source; bool active = true; - // FIXME: priority + int priority = 5; std::string describe() const { if (source) - return fmt("%s#%s", source->originalRef, source->attrPath); + return fmt("%s#%s%s", source->originalRef, source->attrPath, printOutputsSpec(source->outputs)); StringSet names; for (auto & path : storePaths) names.insert(DrvName(path.name()).name); @@ -61,6 +61,25 @@ struct ProfileElement { return std::tuple(describe(), storePaths) < std::tuple(other.describe(), other.storePaths); } + + void updateStorePaths( + ref<Store> evalStore, + ref<Store> store, + const BuiltPaths & builtPaths) + { + storePaths.clear(); + for (auto & buildable : builtPaths) { + std::visit(overloaded { + [&](const BuiltPath::Opaque & bo) { + storePaths.insert(bo.path); + }, + [&](const BuiltPath::Built & bfd) { + for (auto & output : bfd.outputs) + storePaths.insert(output.second); + }, + }, buildable.raw()); + } + } }; struct ProfileManifest @@ -77,19 +96,35 @@ struct ProfileManifest auto json = nlohmann::json::parse(readFile(manifestPath)); auto version = json.value("version", 0); - if (version != 1) - throw Error("profile manifest '%s' has unsupported version %d", manifestPath, version); + std::string sUrl; + std::string sOriginalUrl; + switch (version) { + case 1: + sUrl = "uri"; + sOriginalUrl = "originalUri"; + break; + case 2: + sUrl = "url"; + sOriginalUrl = "originalUrl"; + break; + default: + throw Error("profile manifest '%s' has unsupported version %d", manifestPath, version); + } for (auto & e : json["elements"]) { ProfileElement element; for (auto & p : e["storePaths"]) element.storePaths.insert(state.store->parseStorePath((std::string) p)); element.active = e["active"]; - if (e.value("uri", "") != "") { - element.source = ProfileElementSource{ - 
parseFlakeRef(e["originalUri"]), - parseFlakeRef(e["uri"]), - e["attrPath"] + if(e.contains("priority")) { + element.priority = e["priority"]; + } + if (e.value(sUrl, "") != "") { + element.source = ProfileElementSource { + parseFlakeRef(e[sOriginalUrl]), + parseFlakeRef(e[sUrl]), + e["attrPath"], + e["outputs"].get<OutputsSpec>() }; } elements.emplace_back(std::move(element)); @@ -105,7 +140,7 @@ struct ProfileManifest for (auto & drvInfo : drvInfos) { ProfileElement element; - element.storePaths = {state.store->parseStorePath(drvInfo.queryOutPath())}; + element.storePaths = {drvInfo.queryOutPath()}; elements.emplace_back(std::move(element)); } } @@ -121,15 +156,17 @@ struct ProfileManifest nlohmann::json obj; obj["storePaths"] = paths; obj["active"] = element.active; + obj["priority"] = element.priority; if (element.source) { - obj["originalUri"] = element.source->originalRef.to_string(); - obj["uri"] = element.source->resolvedRef.to_string(); + obj["originalUrl"] = element.source->originalRef.to_string(); + obj["url"] = element.source->resolvedRef.to_string(); obj["attrPath"] = element.source->attrPath; + obj["outputs"] = element.source->outputs; } array.push_back(obj); } nlohmann::json json; - json["version"] = 1; + json["version"] = 2; json["elements"] = array; return json.dump(); } @@ -144,7 +181,7 @@ struct ProfileManifest for (auto & element : elements) { for (auto & path : element.storePaths) { if (element.active) - pkgs.emplace_back(store->printStorePath(path), true, 5); + pkgs.emplace_back(store->printStorePath(path), true, element.priority); references.insert(path); } } @@ -214,8 +251,29 @@ struct ProfileManifest } }; +static std::map<Installable *, BuiltPaths> +builtPathsPerInstallable( + const std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> & builtPaths) +{ + std::map<Installable *, BuiltPaths> res; + for (auto & [installable, builtPath] : builtPaths) + res[installable.get()].push_back(builtPath); + return res; +} + struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile { + std::optional<int64_t> priority; + + CmdProfileInstall() { + addFlag({ + .longName = "priority", + .description = "The priority of the package to install.", + .labels = {"priority"}, + .handler = {&priority}, + }); + }; + std::string description() override { return "install a package into a profile"; @@ -232,53 +290,38 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile { ProfileManifest manifest(*getEvalState(), *profile); - std::vector<DerivedPath> pathsToBuild; + auto builtPaths = builtPathsPerInstallable( + Installable::build2( + getEvalStore(), store, Realise::Outputs, installables, bmNormal)); for (auto & installable : installables) { + ProfileElement element; + + + if (auto installable2 = std::dynamic_pointer_cast<InstallableFlake>(installable)) { + // FIXME: make build() return this? 
auto [attrPath, resolvedRef, drv] = installable2->toDerivation(); - - ProfileElement element; - if (!drv.outPath) - throw UnimplementedError("CA derivations are not yet supported by 'nix profile'"); - element.storePaths = {*drv.outPath}; // FIXME - element.source = ProfileElementSource{ + element.source = ProfileElementSource { installable2->flakeRef, resolvedRef, attrPath, + installable2->outputsSpec }; - pathsToBuild.push_back(DerivedPath::Built{drv.drvPath, StringSet{drv.outputName}}); - - manifest.elements.emplace_back(std::move(element)); - } else { - auto buildables = build(getEvalStore(), store, Realise::Outputs, {installable}, bmNormal); - - for (auto & buildable : buildables) { - ProfileElement element; - - std::visit(overloaded { - [&](const BuiltPath::Opaque & bo) { - pathsToBuild.push_back(bo); - element.storePaths.insert(bo.path); - }, - [&](const BuiltPath::Built & bfd) { - // TODO: Why are we querying if we know the output - // names already? Is it just to figure out what the - // default one is? - for (auto & output : store->queryDerivationOutputMap(bfd.drvPath)) { - pathsToBuild.push_back(DerivedPath::Built{bfd.drvPath, {output.first}}); - element.storePaths.insert(output.second); - } - }, - }, buildable.raw()); - - manifest.elements.emplace_back(std::move(element)); + if(drv.priority) { + element.priority = *drv.priority; } } - } - store->buildPaths(pathsToBuild); + if(priority) { // if --priority was specified we want to override the priority of the installable + element.priority = *priority; + }; + + element.updateStorePaths(getEvalStore(), store, builtPaths[installable.get()]); + + manifest.elements.push_back(std::move(element)); + } updateProfile(manifest.build(store)); } @@ -373,15 +416,15 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem if (removedCount == 0) { for (auto matcher: matchers) { - if (const size_t* index = std::get_if<size_t>(&matcher)){ - warn("'%d' is not a valid index in profile", *index); - } else if (const Path* path = std::get_if<Path>(&matcher)){ - warn("'%s' does not match any paths in profile", *path); - } else if (const RegexPattern* regex = std::get_if<RegexPattern>(&matcher)){ - warn("'%s' does not match any packages in profile", regex->pattern); + if (const size_t * index = std::get_if<size_t>(&matcher)){ + warn("'%d' is not a valid index", *index); + } else if (const Path * path = std::get_if<Path>(&matcher)){ + warn("'%s' does not match any paths", *path); + } else if (const RegexPattern * regex = std::get_if<RegexPattern>(&matcher)){ + warn("'%s' does not match any packages", regex->pattern); } } - warn ("Try `nix profile list` to see the current profile."); + warn ("Use 'nix profile list' to see the current profile."); } updateProfile(newManifest.build(store)); } @@ -407,8 +450,8 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf auto matchers = getMatchers(store); - // FIXME: code duplication - std::vector<DerivedPath> pathsToBuild; + std::vector<std::shared_ptr<Installable>> installables; + std::vector<size_t> indices; auto upgradedCount = 0; @@ -423,49 +466,57 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf Activity act(*logger, lvlChatty, actUnknown, fmt("checking '%s' for updates", element.source->attrPath)); - InstallableFlake installable( + auto installable = std::make_shared<InstallableFlake>( this, getEvalState(), FlakeRef(element.source->originalRef), "", - {element.source->attrPath}, - {}, + element.source->outputs, + 
Strings{element.source->attrPath}, + Strings{}, lockFlags); - auto [attrPath, resolvedRef, drv] = installable.toDerivation(); + auto [attrPath, resolvedRef, drv] = installable->toDerivation(); if (element.source->resolvedRef == resolvedRef) continue; printInfo("upgrading '%s' from flake '%s' to '%s'", element.source->attrPath, element.source->resolvedRef, resolvedRef); - if (!drv.outPath) - throw UnimplementedError("CA derivations are not yet supported by 'nix profile'"); - element.storePaths = {*drv.outPath}; // FIXME - element.source = ProfileElementSource{ - installable.flakeRef, + element.source = ProfileElementSource { + installable->flakeRef, resolvedRef, attrPath, + installable->outputsSpec }; - pathsToBuild.push_back(DerivedPath::Built{drv.drvPath, {drv.outputName}}); + installables.push_back(installable); + indices.push_back(i); } } if (upgradedCount == 0) { for (auto & matcher : matchers) { - if (const size_t* index = std::get_if<size_t>(&matcher)){ - warn("'%d' is not a valid index in profile", *index); - } else if (const Path* path = std::get_if<Path>(&matcher)){ - warn("'%s' does not match any paths in profile", *path); - } else if (const RegexPattern* regex = std::get_if<RegexPattern>(&matcher)){ - warn("'%s' does not match any packages in profile", regex->pattern); + if (const size_t * index = std::get_if<size_t>(&matcher)){ + warn("'%d' is not a valid index", *index); + } else if (const Path * path = std::get_if<Path>(&matcher)){ + warn("'%s' does not match any paths", *path); + } else if (const RegexPattern * regex = std::get_if<RegexPattern>(&matcher)){ + warn("'%s' does not match any packages", regex->pattern); } } warn ("Use 'nix profile list' to see the current profile."); } - store->buildPaths(pathsToBuild); + auto builtPaths = builtPathsPerInstallable( + Installable::build2( + getEvalStore(), store, Realise::Outputs, installables, bmNormal)); + + for (size_t i = 0; i < installables.size(); ++i) { + auto & installable = installables.at(i); + auto & element = manifest.elements[indices.at(i)]; + element.updateStorePaths(getEvalStore(), store, builtPaths[installable.get()]); + } updateProfile(manifest.build(store)); } @@ -492,8 +543,8 @@ struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultPro for (size_t i = 0; i < manifest.elements.size(); ++i) { auto & element(manifest.elements[i]); logger->cout("%d %s %s %s", i, - element.source ? element.source->originalRef.to_string() + "#" + element.source->attrPath : "-", - element.source ? element.source->resolvedRef.to_string() + "#" + element.source->attrPath : "-", + element.source ? element.source->originalRef.to_string() + "#" + element.source->attrPath + printOutputsSpec(element.source->outputs) : "-", + element.source ? element.source->resolvedRef.to_string() + "#" + element.source->attrPath + printOutputsSpec(element.source->outputs) : "-", concatStringsSep(" ", store->printStorePathSet(element.storePaths))); } } diff --git a/src/nix/profile.md b/src/nix/profile.md index 0a4ff2fa9..be3c5ba1a 100644 --- a/src/nix/profile.md +++ b/src/nix/profile.md @@ -11,7 +11,7 @@ them to be rolled back easily. The default profile used by `nix profile` is `$HOME/.nix-profile`, which, if it does not exist, is created as a symlink to -`/nix/var/nix/profiles/per-user/default` if Nix is invoked by the +`/nix/var/nix/profiles/default` if Nix is invoked by the `root` user, or `/nix/var/nix/profiles/per-user/`*username* otherwise. You can specify another profile location using `--profile` *path*. 
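As a hedged aside on the `--profile` flag mentioned in the profile.md hunk above: a profile at a non-default location can be created and inspected with the same subcommands that appear elsewhere in this changeset (`nix profile install`, `nix profile list`). This is only an illustrative sketch; the path `/tmp/my-profile` is a placeholder, not a location the manual prescribes.

```console
# nix profile install --profile /tmp/my-profile nixpkgs#hello
# nix profile list --profile /tmp/my-profile
```
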
@@ -70,7 +70,7 @@ are installed in this version of the profile. It looks like this: { "active": true, "attrPath": "legacyPackages.x86_64-linux.zoom-us", - "originalUri": "flake:nixpkgs", + "originalUrl": "flake:nixpkgs", "storePaths": [ "/nix/store/wbhg2ga8f3h87s9h5k0slxk0m81m4cxl-zoom-us-5.3.469451.0927" ], @@ -84,11 +84,11 @@ are installed in this version of the profile. It looks like this: Each object in the array `elements` denotes an installed package and has the following fields: -* `originalUri`: The [flake reference](./nix3-flake.md) specified by +* `originalUrl`: The [flake reference](./nix3-flake.md) specified by the user at the time of installation (e.g. `nixpkgs`). This is also the flake reference that will be used by `nix profile upgrade`. -* `uri`: The immutable flake reference to which `originalUri` +* `uri`: The immutable flake reference to which `originalUrl` resolved. * `attrPath`: The flake output attribute that provided this diff --git a/src/nix/registry.md b/src/nix/registry.md index d5c9ef442..bd3575d1b 100644 --- a/src/nix/registry.md +++ b/src/nix/registry.md @@ -29,7 +29,7 @@ highest precedence: can be specified using the NixOS option `nix.registry`. * The user registry `~/.config/nix/registry.json`. This registry can - be modified by commands such as `nix flake pin`. + be modified by commands such as `nix registry pin`. * Overrides specified on the command line using the option `--override-flake`. diff --git a/src/nix/repl.md b/src/nix/repl.md index 9b6f2bee3..c5113be61 100644 --- a/src/nix/repl.md +++ b/src/nix/repl.md @@ -24,10 +24,34 @@ R""( * Interact with Nixpkgs in the REPL: ```console - # nix repl '<nixpkgs>' + # nix repl --file example.nix + Loading Installable ''... + Added 3 variables. - Loading '<nixpkgs>'... - Added 12428 variables. + # nix repl --expr '{a={b=3;c=4;};}' + Loading Installable ''... + Added 1 variables. + + # nix repl --expr '{a={b=3;c=4;};}' a + Loading Installable ''... + Added 1 variables. + + # nix repl --extra-experimental-features 'flakes repl-flake' nixpkgs + Loading Installable 'flake:nixpkgs#'... + Added 5 variables. + + nix-repl> legacyPackages.x86_64-linux.emacs.name + "emacs-27.1" + + nix-repl> legacyPackages.x86_64-linux.emacs.name + "emacs-27.1" + + nix-repl> :q + + # nix repl --expr 'import <nixpkgs>{}' + + Loading Installable ''... + Added 12439 variables. nix-repl> emacs.name "emacs-27.1" diff --git a/src/nix/run.cc b/src/nix/run.cc index a67c23bcb..45d2dfd0d 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -38,13 +38,16 @@ void runProgramInStore(ref<Store> store, unshare(CLONE_NEWUSER) doesn't work in a multithreaded program (which "nix" is), so we exec() a single-threaded helper program (chrootHelper() below) to do the work. 
*/ - auto store2 = store.dynamic_pointer_cast<LocalStore>(); + auto store2 = store.dynamic_pointer_cast<LocalFSStore>(); - if (store2 && store->storeDir != store2->getRealStoreDir()) { + if (!store2) + throw Error("store '%s' is not a local store so it does not support command execution", store->getUri()); + + if (store->storeDir != store2->getRealStoreDir()) { Strings helperArgs = { chrootHelperName, store->storeDir, store2->getRealStoreDir(), program }; for (auto & arg : args) helperArgs.push_back(arg); - execv(readLink("/proc/self/exe").c_str(), stringsToCharPtrs(helperArgs).data()); + execv(getSelfExe().value_or("nix").c_str(), stringsToCharPtrs(helperArgs).data()); throw SysError("could not execute chroot helper"); } @@ -91,7 +94,7 @@ struct CmdShell : InstallablesCommand, MixEnvironment void run(ref<Store> store) override { - auto outPaths = toStorePaths(getEvalStore(), store, Realise::Outputs, OperateOn::Output, installables); + auto outPaths = Installable::toStorePaths(getEvalStore(), store, Realise::Outputs, OperateOn::Output, installables); auto accessor = store->getFSAccessor(); @@ -179,6 +182,7 @@ struct CmdRun : InstallableCommand { auto state = getEvalState(); + lockFlags.applyNixConfig = true; auto app = installable->toApp(*state).resolve(getEvalStore(), store); Strings allArgs{app.program}; diff --git a/src/nix/search.cc b/src/nix/search.cc index e9307342c..bdd45cbed 100644 --- a/src/nix/search.cc +++ b/src/nix/search.cc @@ -9,7 +9,7 @@ #include "shared.hh" #include "eval-cache.hh" #include "attr-path.hh" -#include "fmt.hh" +#include "hilite.hh" #include <regex> #include <fstream> @@ -18,16 +18,26 @@ using namespace nix; std::string wrap(std::string prefix, std::string s) { - return prefix + s + ANSI_NORMAL; + return concatStrings(prefix, s, ANSI_NORMAL); } struct CmdSearch : InstallableCommand, MixJSON { std::vector<std::string> res; + std::vector<std::string> excludeRes; CmdSearch() { expectArgs("regex", &res); + addFlag(Flag { + .longName = "exclude", + .shortName = 'e', + .description = "Hide packages whose attribute path, name or description contain *regex*.", + .labels = {"regex"}, + .handler = {[this](std::string s) { + excludeRes.push_back(s); + }}, + }); } std::string description() override @@ -62,11 +72,16 @@ struct CmdSearch : InstallableCommand, MixJSON res.push_back("^"); std::vector<std::regex> regexes; + std::vector<std::regex> excludeRegexes; regexes.reserve(res.size()); + excludeRegexes.reserve(excludeRes.size()); for (auto & re : res) regexes.push_back(std::regex(re, std::regex::extended | std::regex::icase)); + for (auto & re : excludeRes) + excludeRegexes.emplace_back(re, std::regex::extended | std::regex::icase); + auto state = getEvalState(); auto jsonOut = json ? 
std::make_unique<JSONObject>(std::cout) : nullptr; @@ -77,13 +92,15 @@ struct CmdSearch : InstallableCommand, MixJSON visit = [&](eval_cache::AttrCursor & cursor, const std::vector<Symbol> & attrPath, bool initialRecurse) { + auto attrPathS = state->symbols.resolve(attrPath); + Activity act(*logger, lvlInfo, actUnknown, - fmt("evaluating '%s'", concatStringsSep(".", attrPath))); + fmt("evaluating '%s'", concatStringsSep(".", attrPathS))); try { auto recurse = [&]() { for (const auto & attr : cursor.getAttrs()) { - auto cursor2 = cursor.getAttr(attr); + auto cursor2 = cursor.getAttr(state->symbols[attr]); auto attrPath2(attrPath); attrPath2.push_back(attr); visit(*cursor2, attrPath2, false); @@ -91,19 +108,27 @@ struct CmdSearch : InstallableCommand, MixJSON }; if (cursor.isDerivation()) { - DrvName name(cursor.getAttr("name")->getString()); + DrvName name(cursor.getAttr(state->sName)->getString()); - auto aMeta = cursor.maybeGetAttr("meta"); - auto aDescription = aMeta ? aMeta->maybeGetAttr("description") : nullptr; + auto aMeta = cursor.maybeGetAttr(state->sMeta); + auto aDescription = aMeta ? aMeta->maybeGetAttr(state->sDescription) : nullptr; auto description = aDescription ? aDescription->getString() : ""; std::replace(description.begin(), description.end(), '\n', ' '); - auto attrPath2 = concatStringsSep(".", attrPath); + auto attrPath2 = concatStringsSep(".", attrPathS); std::vector<std::smatch> attrPathMatches; std::vector<std::smatch> descriptionMatches; std::vector<std::smatch> nameMatches; bool found = false; + for (auto & regex : excludeRegexes) { + if ( + std::regex_search(attrPath2, regex) + || std::regex_search(name.name, regex) + || std::regex_search(description, regex)) + return; + } + for (auto & regex : regexes) { found = false; auto addAll = [&found](std::sregex_iterator it, std::vector<std::smatch> & vec) { @@ -131,42 +156,42 @@ struct CmdSearch : InstallableCommand, MixJSON jsonElem.attr("version", name.version); jsonElem.attr("description", description); } else { - auto name2 = hiliteMatches(name.name, std::move(nameMatches), ANSI_GREEN, "\e[0;2m"); + auto name2 = hiliteMatches(name.name, nameMatches, ANSI_GREEN, "\e[0;2m"); if (results > 1) logger->cout(""); logger->cout( "* %s%s", - wrap("\e[0;1m", hiliteMatches(attrPath2, std::move(attrPathMatches), ANSI_GREEN, "\e[0;1m")), + wrap("\e[0;1m", hiliteMatches(attrPath2, attrPathMatches, ANSI_GREEN, "\e[0;1m")), name.version != "" ? 
" (" + name.version + ")" : ""); if (description != "") logger->cout( - " %s", hiliteMatches(description, std::move(descriptionMatches), ANSI_GREEN, ANSI_NORMAL)); + " %s", hiliteMatches(description, descriptionMatches, ANSI_GREEN, ANSI_NORMAL)); } } } else if ( attrPath.size() == 0 - || (attrPath[0] == "legacyPackages" && attrPath.size() <= 2) - || (attrPath[0] == "packages" && attrPath.size() <= 2)) + || (attrPathS[0] == "legacyPackages" && attrPath.size() <= 2) + || (attrPathS[0] == "packages" && attrPath.size() <= 2)) recurse(); else if (initialRecurse) recurse(); - else if (attrPath[0] == "legacyPackages" && attrPath.size() > 2) { + else if (attrPathS[0] == "legacyPackages" && attrPath.size() > 2) { auto attr = cursor.maybeGetAttr(state->sRecurseForDerivations); if (attr && attr->getBool()) recurse(); } } catch (EvalError & e) { - if (!(attrPath.size() > 0 && attrPath[0] == "legacyPackages")) + if (!(attrPath.size() > 0 && attrPathS[0] == "legacyPackages")) throw; } }; - for (auto & [cursor, prefix] : installable->getCursors(*state)) - visit(*cursor, parseAttrPath(*state, prefix), true); + for (auto & cursor : installable->getCursors(*state)) + visit(*cursor, cursor->getAttrPath(), true); if (!json && !results) throw Error("no results for the given search term(s)!"); diff --git a/src/nix/search.md b/src/nix/search.md index d182788a6..5a5b5ae05 100644 --- a/src/nix/search.md +++ b/src/nix/search.md @@ -43,12 +43,23 @@ R""( # nix search nixpkgs 'firefox|chromium' ``` -* Search for packages containing `git'`and either `frontend` or `gui`: +* Search for packages containing `git` and either `frontend` or `gui`: ```console # nix search nixpkgs git 'frontend|gui' ``` +* Search for packages containing `neovim` but hide ones containing either `gui` or `python`: + + ```console + # nix search nixpkgs neovim -e 'python|gui' + ``` + or + + ```console + # nix search nixpkgs neovim -e 'python' -e 'gui' + ``` + # Description `nix search` searches *installable* (which must be evaluatable, e.g. a diff --git a/src/nix/shell.md b/src/nix/shell.md index 90b81fb2f..9fa1031f5 100644 --- a/src/nix/shell.md +++ b/src/nix/shell.md @@ -23,6 +23,12 @@ R""( Hi everybody! 
``` +* Run multiple commands in a shell environment: + + ```console + # nix shell nixpkgs#gnumake -c sh -c "cd src && make" + ``` + * Run GNU Hello in a chroot store: ```console diff --git a/src/nix/show-derivation.cc b/src/nix/show-derivation.cc index c614be68d..fb46b4dbf 100644 --- a/src/nix/show-derivation.cc +++ b/src/nix/show-derivation.cc @@ -40,7 +40,7 @@ struct CmdShowDerivation : InstallablesCommand void run(ref<Store> store) override { - auto drvPaths = toDerivations(store, installables, true); + auto drvPaths = Installable::toDerivations(store, installables, true); if (recursive) { StorePathSet closure; @@ -65,19 +65,23 @@ struct CmdShowDerivation : InstallablesCommand auto & outputName = _outputName; // work around clang bug auto outputObj { outputsObj.object(outputName) }; std::visit(overloaded { - [&](const DerivationOutputInputAddressed & doi) { + [&](const DerivationOutput::InputAddressed & doi) { outputObj.attr("path", store->printStorePath(doi.path)); }, - [&](const DerivationOutputCAFixed & dof) { + [&](const DerivationOutput::CAFixed & dof) { outputObj.attr("path", store->printStorePath(dof.path(*store, drv.name, outputName))); outputObj.attr("hashAlgo", dof.hash.printMethodAlgo()); outputObj.attr("hash", dof.hash.hash.to_string(Base16, false)); }, - [&](const DerivationOutputCAFloating & dof) { + [&](const DerivationOutput::CAFloating & dof) { outputObj.attr("hashAlgo", makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType)); }, - [&](const DerivationOutputDeferred &) {}, - }, output.output); + [&](const DerivationOutput::Deferred &) {}, + [&](const DerivationOutput::Impure & doi) { + outputObj.attr("hashAlgo", makeFileIngestionPrefix(doi.method) + printHashType(doi.hashType)); + outputObj.attr("impure", true); + }, + }, output.raw()); } } diff --git a/src/nix/store-copy-log.cc b/src/nix/store-copy-log.cc index 079cd6b3e..2e288f743 100644 --- a/src/nix/store-copy-log.cc +++ b/src/nix/store-copy-log.cc @@ -1,6 +1,8 @@ #include "command.hh" #include "shared.hh" #include "store-api.hh" +#include "store-cast.hh" +#include "log-store.hh" #include "sync.hh" #include "thread-pool.hh" @@ -26,7 +28,10 @@ struct CmdCopyLog : virtual CopyCommand, virtual InstallablesCommand void run(ref<Store> srcStore) override { + auto & srcLogStore = require<LogStore>(*srcStore); + auto dstStore = getDstStore(); + auto & dstLogStore = require<LogStore>(*dstStore); StorePathSet drvPaths; @@ -35,8 +40,8 @@ struct CmdCopyLog : virtual CopyCommand, virtual InstallablesCommand drvPaths.insert(drvPath); for (auto & drvPath : drvPaths) { - if (auto log = srcStore->getBuildLog(drvPath)) - dstStore->addBuildLog(drvPath, *log); + if (auto log = srcLogStore.getBuildLog(drvPath)) + dstLogStore.addBuildLog(drvPath, *log); else throw Error("build log for '%s' is not available", srcStore->printStorePath(drvPath)); } diff --git a/src/nix/store-delete.cc b/src/nix/store-delete.cc index e4a3cb554..ca43f1530 100644 --- a/src/nix/store-delete.cc +++ b/src/nix/store-delete.cc @@ -2,6 +2,8 @@ #include "common-args.hh" #include "shared.hh" #include "store-api.hh" +#include "store-cast.hh" +#include "gc-store.hh" using namespace nix; @@ -32,12 +34,14 @@ struct CmdStoreDelete : StorePathsCommand void run(ref<Store> store, std::vector<StorePath> && storePaths) override { + auto & gcStore = require<GcStore>(*store); + for (auto & path : storePaths) options.pathsToDelete.insert(path); GCResults results; PrintFreed freed(true, results); - store->collectGarbage(options, results); + 
gcStore.collectGarbage(options, results); } }; diff --git a/src/nix/store-gc.cc b/src/nix/store-gc.cc index a2d74066e..8b9b5d164 100644 --- a/src/nix/store-gc.cc +++ b/src/nix/store-gc.cc @@ -2,6 +2,8 @@ #include "common-args.hh" #include "shared.hh" #include "store-api.hh" +#include "store-cast.hh" +#include "gc-store.hh" using namespace nix; @@ -33,10 +35,12 @@ struct CmdStoreGC : StoreCommand, MixDryRun void run(ref<Store> store) override { + auto & gcStore = require<GcStore>(*store); + options.action = dryRun ? GCOptions::gcReturnDead : GCOptions::gcDeleteDead; GCResults results; PrintFreed freed(options.action == GCOptions::gcDeleteDead, results); - store->collectGarbage(options, results); + gcStore.collectGarbage(options, results); } }; diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc index 17a5a77ee..2d2453395 100644 --- a/src/nix/upgrade-nix.cc +++ b/src/nix/upgrade-nix.cc @@ -34,7 +34,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand std::string description() override { - return "upgrade Nix to the latest stable version"; + return "upgrade Nix to the stable version declared in Nixpkgs"; } std::string doc() override diff --git a/src/nix/upgrade-nix.md b/src/nix/upgrade-nix.md index 4d27daad9..084c80ba2 100644 --- a/src/nix/upgrade-nix.md +++ b/src/nix/upgrade-nix.md @@ -2,7 +2,7 @@ R""( # Examples -* Upgrade Nix to the latest stable version: +* Upgrade Nix to the stable version declared in Nixpkgs: ```console # nix upgrade-nix @@ -16,8 +16,11 @@ R""( # Description -This command upgrades Nix to the latest version. By default, it -locates the directory containing the `nix` binary in the `$PATH` +This command upgrades Nix to the stable version declared in Nixpkgs. +This stable version is defined in [nix-fallback-paths.nix](https://github.com/NixOS/nixpkgs/raw/master/nixos/modules/installer/tools/nix-fallback-paths.nix) +and updated manually. It may not always be the latest tagged release. + +By default, it locates the directory containing the `nix` binary in the `$PATH` environment variable. If that directory is a Nix profile, it will upgrade the `nix` package in that profile to the latest stable binary release. 
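A rough, purely illustrative sketch of the `nix upgrade-nix` behaviour described in the hunks above: since `CmdUpgradeNix` mixes in `MixDryRun`, the pinned upgrade can be previewed with `--dry-run`, and because `nix-fallback-paths.nix` is an ordinary Nix expression it can also be evaluated directly. That the file still evaluates to a plain per-platform attribute set of store paths is an assumption here, not something the patch states.

```console
# nix upgrade-nix --dry-run
# nix eval --impure --expr \
    'import (builtins.fetchurl "https://github.com/NixOS/nixpkgs/raw/master/nixos/modules/installer/tools/nix-fallback-paths.nix")'
```
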
diff --git a/src/nix/verify.cc b/src/nix/verify.cc index e92df1303..efa2434dc 100644 --- a/src/nix/verify.cc +++ b/src/nix/verify.cc @@ -41,7 +41,7 @@ struct CmdVerify : StorePathsCommand addFlag({ .longName = "sigs-needed", .shortName = 'n', - .description = "Require that each path has at least *n* valid signatures.", + .description = "Require that each path is signed by at least *n* different keys.", .labels = {"n"}, .handler = {&sigsNeeded} }); diff --git a/src/nix/why-depends.cc b/src/nix/why-depends.cc index f7a3ec3a0..1d9ab28ba 100644 --- a/src/nix/why-depends.cc +++ b/src/nix/why-depends.cc @@ -82,9 +82,9 @@ struct CmdWhyDepends : SourceExprCommand void run(ref<Store> store) override { auto package = parseInstallable(store, _package); - auto packagePath = toStorePath(getEvalStore(), store, Realise::Outputs, operateOn, package); + auto packagePath = Installable::toStorePath(getEvalStore(), store, Realise::Outputs, operateOn, package); auto dependency = parseInstallable(store, _dependency); - auto dependencyPath = toStorePath(getEvalStore(), store, Realise::Derivation, operateOn, dependency); + auto dependencyPath = Installable::toStorePath(getEvalStore(), store, Realise::Derivation, operateOn, dependency); auto dependencyPathHash = dependencyPath.hashPart(); StorePathSet closure; diff --git a/src/resolve-system-dependencies/resolve-system-dependencies.cc b/src/resolve-system-dependencies/resolve-system-dependencies.cc index 4dd691981..c6023eb03 100644 --- a/src/resolve-system-dependencies/resolve-system-dependencies.cc +++ b/src/resolve-system-dependencies/resolve-system-dependencies.cc @@ -176,7 +176,7 @@ int main(int argc, char ** argv) impurePaths.insert(argv[2]); else { auto drv = store->derivationFromPath(store->parseStorePath(argv[1])); - impurePaths = tokenizeString<StringSet>(get(drv.env, "__impureHostDeps").value_or("")); + impurePaths = tokenizeString<StringSet>(getOr(drv.env, "__impureHostDeps", "")); impurePaths.insert("/usr/lib/libSystem.dylib"); } diff --git a/tests/bash-profile.sh b/tests/bash-profile.sh new file mode 100644 index 000000000..e2e0d1090 --- /dev/null +++ b/tests/bash-profile.sh @@ -0,0 +1,9 @@ +source common.sh + +sed -e "s|@localstatedir@|$TEST_ROOT/profile-var|g" -e "s|@coreutils@|$coreutils|g" < ../scripts/nix-profile.sh.in > $TEST_ROOT/nix-profile.sh + +user=$(whoami) +rm -rf $TEST_HOME $TEST_ROOT/profile-var +mkdir -p $TEST_HOME +USER=$user $SHELL -e -c ". $TEST_ROOT/nix-profile.sh; set" +USER=$user $SHELL -e -c ". 
$TEST_ROOT/nix-profile.sh" # test idempotency diff --git a/tests/binary-cache.sh b/tests/binary-cache.sh index 2368884f7..0361ac6a8 100644 --- a/tests/binary-cache.sh +++ b/tests/binary-cache.sh @@ -1,6 +1,6 @@ source common.sh -needLocalStore "“--no-require-sigs” can’t be used with the daemon" +needLocalStore "'--no-require-sigs' can’t be used with the daemon" # We can produce drvs directly into the binary cache clearStore diff --git a/tests/build-dry.sh b/tests/build-dry.sh index e72533e70..5f29239dc 100644 --- a/tests/build-dry.sh +++ b/tests/build-dry.sh @@ -18,9 +18,6 @@ nix-build --no-out-link dependencies.nix --dry-run 2>&1 | grep "will be built" # Now new command: nix build -f dependencies.nix --dry-run 2>&1 | grep "will be built" -# TODO: XXX: FIXME: #1793 -# Disable this part of the test until the problem is resolved: -if [ -n "$ISSUE_1795_IS_FIXED" ]; then clearStore clearCache @@ -28,7 +25,6 @@ clearCache nix build -f dependencies.nix --dry-run 2>&1 | grep "will be built" # Now old command: nix-build --no-out-link dependencies.nix --dry-run 2>&1 | grep "will be built" -fi ################################################### # Check --dry-run doesn't create links with --dry-run @@ -50,3 +46,22 @@ nix build -f dependencies.nix -o $RESULT --dry-run nix build -f dependencies.nix -o $RESULT [[ -h $RESULT ]] + +################################################### +# Check the JSON output +clearStore +clearCache + +RES=$(nix build -f dependencies.nix --dry-run --json) + +if [[ -z "$NIX_TESTS_CA_BY_DEFAULT" ]]; then + echo "$RES" | jq '.[0] | [ + (.drvPath | test("'$NIX_STORE_DIR'.*\\.drv")), + (.outputs.out | test("'$NIX_STORE_DIR'")) + ] | all' +else + echo "$RES" | jq '.[0] | [ + (.drvPath | test("'$NIX_STORE_DIR'.*\\.drv")), + .outputs.out == null + ] | all' +fi diff --git a/tests/build-hook-ca-fixed.nix b/tests/build-hook-ca-fixed.nix index ec7171ac9..4cb9e85d1 100644 --- a/tests/build-hook-ca-fixed.nix +++ b/tests/build-hook-ca-fixed.nix @@ -11,13 +11,13 @@ let args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")]; outputHashMode = "recursive"; outputHashAlgo = "sha256"; - } // removeAttrs args ["builder" "meta"]) - // { meta = args.meta or {}; }; + } // removeAttrs args ["builder" "meta" "passthru"]) + // { meta = args.meta or {}; passthru = args.passthru or {}; }; input1 = mkDerivation { shell = busybox; name = "build-remote-input-1"; - buildCommand = "echo FOO > $out"; + buildCommand = "echo hi-input1; echo FOO > $out"; requiredSystemFeatures = ["foo"]; outputHash = "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="; }; @@ -25,7 +25,7 @@ let input2 = mkDerivation { shell = busybox; name = "build-remote-input-2"; - buildCommand = "echo BAR > $out"; + buildCommand = "echo hi; echo BAR > $out"; requiredSystemFeatures = ["bar"]; outputHash = "sha256-XArauVH91AVwP9hBBQNlkX9ccuPpSYx9o0zeIHb6e+Q="; }; @@ -34,6 +34,7 @@ let shell = busybox; name = "build-remote-input-3"; buildCommand = '' + echo hi-input3 read x < ${input2} echo $x BAZ > $out ''; @@ -46,6 +47,7 @@ in mkDerivation { shell = busybox; name = "build-remote"; + passthru = { inherit input1 input2 input3; }; buildCommand = '' read x < ${input1} diff --git a/tests/build-hook.nix b/tests/build-hook.nix index eb16676f0..643334caa 100644 --- a/tests/build-hook.nix +++ b/tests/build-hook.nix @@ -9,20 +9,20 @@ let inherit system; builder = busybox; args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e 
.attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")]; - } // removeAttrs args ["builder" "meta"]) - // { meta = args.meta or {}; }; + } // removeAttrs args ["builder" "meta" "passthru"]) + // { meta = args.meta or {}; passthru = args.passthru or {}; }; input1 = mkDerivation { shell = busybox; name = "build-remote-input-1"; - buildCommand = "echo FOO > $out"; + buildCommand = "echo hi-input1; echo FOO > $out"; requiredSystemFeatures = ["foo"]; }; input2 = mkDerivation { shell = busybox; name = "build-remote-input-2"; - buildCommand = "echo BAR > $out"; + buildCommand = "echo hi; echo BAR > $out"; requiredSystemFeatures = ["bar"]; }; @@ -30,6 +30,7 @@ let shell = busybox; name = "build-remote-input-3"; buildCommand = '' + echo hi-input3 read x < ${input2} echo $x BAZ > $out ''; @@ -41,6 +42,7 @@ in mkDerivation { shell = busybox; name = "build-remote"; + passthru = { inherit input1 input2 input3; }; buildCommand = '' read x < ${input1} diff --git a/tests/build-remote-content-addressed-floating.sh b/tests/build-remote-content-addressed-floating.sh index 13ef47d2d..e83b42b41 100644 --- a/tests/build-remote-content-addressed-floating.sh +++ b/tests/build-remote-content-addressed-floating.sh @@ -2,7 +2,7 @@ source common.sh file=build-hook-ca-floating.nix -sed -i 's/experimental-features .*/& ca-derivations/' "$NIX_CONF_DIR"/nix.conf +enableFeatures "ca-derivations" CONTENT_ADDRESSED=true diff --git a/tests/build-remote.sh b/tests/build-remote.sh index 806c6d261..e73c37ea4 100644 --- a/tests/build-remote.sh +++ b/tests/build-remote.sh @@ -34,6 +34,14 @@ outPath=$(readlink -f $TEST_ROOT/result) grep 'FOO BAR BAZ' $TEST_ROOT/machine0/$outPath +testPrintOutPath=$(nix build -L -v -f $file --no-link --print-out-paths --max-jobs 0 \ + --arg busybox $busybox \ + --store $TEST_ROOT/machine0 \ + --builders "$(join_by '; ' "${builders[@]}")" +) + +[[ $testPrintOutPath =~ store.*build-remote ]] + set -o pipefail # Ensure that input1 was built on store1 due to the required feature. @@ -54,8 +62,17 @@ nix path-info --store $TEST_ROOT/machine3 --all \ | grep -v builder-build-remote-input-2.sh \ | grep builder-build-remote-input-3.sh + +# Temporarily disabled because of https://github.com/NixOS/nix/issues/6209 +if [[ -z "$CONTENT_ADDRESSED" ]]; then + for i in input1 input3; do + nix log --store $TEST_ROOT/machine0 --file "$file" --arg busybox $busybox passthru."$i" | grep hi-$i + done +fi + # Behavior of keep-failed out="$(nix-build 2>&1 failing.nix \ + --no-out-link \ --builders "$(join_by '; ' "${builders[@]}")" \ --keep-failed \ --store $TEST_ROOT/machine0 \ diff --git a/tests/build.sh b/tests/build.sh index c77f620f7..fc6825e25 100644 --- a/tests/build.sh +++ b/tests/build.sh @@ -1,15 +1,68 @@ source common.sh -expectedJSONRegex='\[\{"drvPath":".*multiple-outputs-a.drv","outputs":\{"first":".*multiple-outputs-a-first","second":".*multiple-outputs-a-second"}},\{"drvPath":".*multiple-outputs-b.drv","outputs":\{"out":".*multiple-outputs-b"}}]' -nix build -f multiple-outputs.nix --json a.all b.all --no-link | jq --exit-status ' +clearStore + +set -o pipefail + +# Make sure that 'nix build' returns all outputs by default. 
+nix build -f multiple-outputs.nix --json a b --no-link | jq --exit-status ' (.[0] | (.drvPath | match(".*multiple-outputs-a.drv")) and + (.outputs | keys | length == 2) and (.outputs.first | match(".*multiple-outputs-a-first")) and (.outputs.second | match(".*multiple-outputs-a-second"))) and (.[1] | (.drvPath | match(".*multiple-outputs-b.drv")) and + (.outputs | keys | length == 1) and (.outputs.out | match(".*multiple-outputs-b"))) ' + +# Test output selection using the '^' syntax. +nix build -f multiple-outputs.nix --json a^first --no-link | jq --exit-status ' + (.[0] | + (.drvPath | match(".*multiple-outputs-a.drv")) and + (.outputs | keys == ["first"])) +' + +nix build -f multiple-outputs.nix --json a^second,first --no-link | jq --exit-status ' + (.[0] | + (.drvPath | match(".*multiple-outputs-a.drv")) and + (.outputs | keys == ["first", "second"])) +' + +nix build -f multiple-outputs.nix --json 'a^*' --no-link | jq --exit-status ' + (.[0] | + (.drvPath | match(".*multiple-outputs-a.drv")) and + (.outputs | keys == ["first", "second"])) +' + +# Test that 'outputsToInstall' is respected by default. +nix build -f multiple-outputs.nix --json e --no-link | jq --exit-status ' + (.[0] | + (.drvPath | match(".*multiple-outputs-e.drv")) and + (.outputs | keys == ["a", "b"])) +' + +# But not when it's overriden. +nix build -f multiple-outputs.nix --json e^a --no-link | jq --exit-status ' + (.[0] | + (.drvPath | match(".*multiple-outputs-e.drv")) and + (.outputs | keys == ["a"])) +' + +nix build -f multiple-outputs.nix --json 'e^*' --no-link | jq --exit-status ' + (.[0] | + (.drvPath | match(".*multiple-outputs-e.drv")) and + (.outputs | keys == ["a", "b", "c"])) +' + +# Make sure that `--impure` works (regression test for https://github.com/NixOS/nix/issues/6488) +nix build --impure -f multiple-outputs.nix --json e --no-link | jq --exit-status ' + (.[0] | + (.drvPath | match(".*multiple-outputs-e.drv")) and + (.outputs | keys == ["a", "b"])) +' + testNormalization () { clearStore outPath=$(nix-build ./simple.nix --no-out-link) diff --git a/tests/ca-shell.nix b/tests/ca-shell.nix index ad2ab6aff..36e1d1526 100644 --- a/tests/ca-shell.nix +++ b/tests/ca-shell.nix @@ -1 +1 @@ -{ ... }@args: import ./shell.nix (args // { contentAddressed = true; }) +{ inNixShell ? false, ... }@args: import ./shell.nix (args // { contentAddressed = true; }) diff --git a/tests/ca/build-dry.sh b/tests/ca/build-dry.sh new file mode 100644 index 000000000..9a72075ec --- /dev/null +++ b/tests/ca/build-dry.sh @@ -0,0 +1,6 @@ +source common.sh + +export NIX_TESTS_CA_BY_DEFAULT=1 + +cd .. && source build-dry.sh + diff --git a/tests/ca/build.sh b/tests/ca/build.sh index c8877f87f..92f8b429a 100644 --- a/tests/ca/build.sh +++ b/tests/ca/build.sh @@ -37,7 +37,7 @@ testCutoffFor () { } testCutoff () { - # Don't directly build depenentCA, that way we'll make sure we dodn't rely on + # Don't directly build dependentCA, that way we'll make sure we don't rely on # dependent derivations always being already built. 
#testDerivation dependentCA testCutoffFor transitivelyDependentCA diff --git a/tests/ca/common.sh b/tests/ca/common.sh index c5aa34334..b104b5a78 100644 --- a/tests/ca/common.sh +++ b/tests/ca/common.sh @@ -1,5 +1,5 @@ source ../common.sh -sed -i 's/experimental-features .*/& ca-derivations ca-references/' "$NIX_CONF_DIR"/nix.conf +enableFeatures "ca-derivations" restartDaemon diff --git a/tests/ca/content-addressed.nix b/tests/ca/content-addressed.nix index d328fc92c..81bc4bf5c 100644 --- a/tests/ca/content-addressed.nix +++ b/tests/ca/content-addressed.nix @@ -23,7 +23,7 @@ rec { }; rootCA = mkCADerivation { name = "rootCA"; - outputs = [ "out" "dev" "foo"]; + outputs = [ "out" "dev" "foo" ]; buildCommand = '' echo "building a CA derivation" echo "The seed is ${toString seed}" @@ -64,8 +64,7 @@ rec { dependentFixedOutput = mkDerivation { name = "dependent-fixed-output"; outputHashMode = "recursive"; - outputHashAlgo = "sha256"; - outputHash = "sha256-QvtAMbUl/uvi+LCObmqOhvNOapHdA2raiI4xG5zI5pA="; + outputHash = "sha512-7aJcmSuEuYP5tGKcmGY8bRr/lrCjJlOxP2mIUjO/vMQeg6gx/65IbzRWES8EKiPDOs9z+wF30lEfcwxM/cT4pw=="; buildCommand = '' cat ${dependentCA}/dep echo foo > $out @@ -76,7 +75,7 @@ rec { buildCommand = '' mkdir -p $out/bin echo ${rootCA} # Just to make it depend on it - echo "" > $out/bin/${name} + echo "#! ${shell}" > $out/bin/${name} chmod +x $out/bin/${name} ''; }; diff --git a/tests/ca/duplicate-realisation-in-closure.sh b/tests/ca/duplicate-realisation-in-closure.sh index 74c5d25fd..da9cd8fb4 100644 --- a/tests/ca/duplicate-realisation-in-closure.sh +++ b/tests/ca/duplicate-realisation-in-closure.sh @@ -2,8 +2,6 @@ source ./common.sh requireDaemonNewerThan "2.4pre20210625" -sed -i 's/experimental-features .*/& ca-derivations ca-references/' "$NIX_CONF_DIR"/nix.conf - export REMOTE_STORE_DIR="$TEST_ROOT/remote_store" export REMOTE_STORE="file://$REMOTE_STORE_DIR" diff --git a/tests/ca/nix-copy.sh b/tests/ca/nix-copy.sh index 2e0dea2d2..7a8307a4e 100755 --- a/tests/ca/nix-copy.sh +++ b/tests/ca/nix-copy.sh @@ -2,9 +2,6 @@ source common.sh -# Globally enable the ca derivations experimental flag -sed -i 's/experimental-features = .*/& ca-derivations ca-references/' "$NIX_CONF_DIR/nix.conf" - export REMOTE_STORE_DIR="$TEST_ROOT/remote_store" export REMOTE_STORE="file://$REMOTE_STORE_DIR" diff --git a/tests/ca/nix-run.sh b/tests/ca/nix-run.sh index 81402af10..5f46518e8 100755 --- a/tests/ca/nix-run.sh +++ b/tests/ca/nix-run.sh @@ -2,8 +2,6 @@ source common.sh -sed -i 's/experimental-features .*/& ca-derivations ca-references nix-command flakes/' "$NIX_CONF_DIR"/nix.conf - FLAKE_PATH=path:$PWD nix run --no-write-lock-file $FLAKE_PATH#runnable diff --git a/tests/ca/nix-shell.sh b/tests/ca/nix-shell.sh index 7f1a3a73e..1c5a6639f 100755 --- a/tests/ca/nix-shell.sh +++ b/tests/ca/nix-shell.sh @@ -2,8 +2,6 @@ source common.sh -sed -i 's/experimental-features .*/& ca-derivations ca-references nix-command flakes/' "$NIX_CONF_DIR"/nix.conf - CONTENT_ADDRESSED=true cd .. source ./nix-shell.sh diff --git a/tests/ca/post-hook.sh b/tests/ca/post-hook.sh index 1c9d4f700..705bde9d4 100755 --- a/tests/ca/post-hook.sh +++ b/tests/ca/post-hook.sh @@ -4,8 +4,6 @@ source common.sh requireDaemonNewerThan "2.4pre20210626" -sed -i 's/experimental-features .*/& ca-derivations ca-references nix-command flakes/' "$NIX_CONF_DIR"/nix.conf - export NIX_TESTS_CA_BY_DEFAULT=1 cd .. 
source ./post-hook.sh diff --git a/tests/ca/recursive.sh b/tests/ca/recursive.sh index 648bf0a91..0354d23b4 100755 --- a/tests/ca/recursive.sh +++ b/tests/ca/recursive.sh @@ -4,8 +4,6 @@ source common.sh requireDaemonNewerThan "2.4pre20210623" -sed -i 's/experimental-features .*/& ca-derivations ca-references nix-command flakes/' "$NIX_CONF_DIR"/nix.conf - export NIX_TESTS_CA_BY_DEFAULT=1 cd .. source ./recursive.sh diff --git a/tests/ca/selfref-gc.sh b/tests/ca/selfref-gc.sh new file mode 100755 index 000000000..248778894 --- /dev/null +++ b/tests/ca/selfref-gc.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +source common.sh + +requireDaemonNewerThan "2.4pre20210626" + +enableFeatures "ca-derivations nix-command flakes" + +export NIX_TESTS_CA_BY_DEFAULT=1 +cd .. +source ./selfref-gc.sh diff --git a/tests/ca/signatures.sh b/tests/ca/signatures.sh index 0c7d974ea..eb18a4130 100644 --- a/tests/ca/signatures.sh +++ b/tests/ca/signatures.sh @@ -1,8 +1,5 @@ source common.sh -# Globally enable the ca derivations experimental flag -sed -i 's/experimental-features = .*/& ca-derivations ca-references/' "$NIX_CONF_DIR/nix.conf" - clearStore clearCache diff --git a/tests/ca/substitute.sh b/tests/ca/substitute.sh index 3d9001bb8..819f3fd85 100644 --- a/tests/ca/substitute.sh +++ b/tests/ca/substitute.sh @@ -25,7 +25,8 @@ buildDrvs --substitute --substituters $REMOTE_STORE --no-require-sigs -j0 transi # Check that the thing we’ve just substituted has its realisation stored nix realisation info --file ./content-addressed.nix transitivelyDependentCA # Check that its dependencies have it too -nix realisation info --file ./content-addressed.nix dependentCA rootCA +nix realisation info --file ./content-addressed.nix dependentCA +# nix realisation info --file ./content-addressed.nix rootCA --outputs out # Same thing, but # 1. With non-ca derivations diff --git a/tests/check.sh b/tests/check.sh index ab48ff865..495202781 100644 --- a/tests/check.sh +++ b/tests/check.sh @@ -40,6 +40,14 @@ nix-build check.nix -A deterministic --argstr checkBuildId $checkBuildId \ if grep -q 'may not be deterministic' $TEST_ROOT/log; then false; fi checkBuildTempDirRemoved $TEST_ROOT/log +nix build -f check.nix deterministic --rebuild --repeat 1 \ + --argstr checkBuildId $checkBuildId --keep-failed --no-link \ + 2> $TEST_ROOT/log +if grep -q 'checking is not possible' $TEST_ROOT/log; then false; fi +# Repeat is set to 1, ie. nix should build deterministic twice. +if [ "$(grep "checking outputs" $TEST_ROOT/log | wc -l)" -ne 2 ]; then false; fi +checkBuildTempDirRemoved $TEST_ROOT/log + nix-build check.nix -A nondeterministic --argstr checkBuildId $checkBuildId \ --no-out-link 2> $TEST_ROOT/log checkBuildTempDirRemoved $TEST_ROOT/log @@ -50,6 +58,12 @@ grep 'may not be deterministic' $TEST_ROOT/log [ "$status" = "104" ] checkBuildTempDirRemoved $TEST_ROOT/log +nix build -f check.nix nondeterministic --rebuild --repeat 1 \ + --argstr checkBuildId $checkBuildId --keep-failed --no-link \ + 2> $TEST_ROOT/log || status=$? +grep 'may not be deterministic' $TEST_ROOT/log +checkBuildTempDirRemoved $TEST_ROOT/log + nix-build check.nix -A nondeterministic --argstr checkBuildId $checkBuildId \ --no-out-link --check --keep-failed 2> $TEST_ROOT/log || status=$? 
grep 'may not be deterministic' $TEST_ROOT/log diff --git a/tests/common.sh.in b/tests/common.sh.in index e485329ba..73c2d2309 100644 --- a/tests/common.sh.in +++ b/tests/common.sh.in @@ -50,6 +50,8 @@ export busybox="@sandbox_shell@" export version=@PACKAGE_VERSION@ export system=@system@ +export BUILD_SHARED_LIBS=@BUILD_SHARED_LIBS@ + export IMPURE_VAR1=foo export IMPURE_VAR2=bar @@ -87,8 +89,7 @@ startDaemon() { if [[ "$NIX_REMOTE" == daemon ]]; then return fi - # Start the daemon, wait for the socket to appear. !!! - # ‘nix-daemon’ should have an option to fork into the background. + # Start the daemon, wait for the socket to appear. rm -f $NIX_DAEMON_SOCKET_PATH PATH=$DAEMON_PATH nix-daemon& pidDaemon=$! @@ -109,20 +110,20 @@ startDaemon() { killDaemon() { kill $pidDaemon for i in {0..100}; do - kill -0 $pidDaemon || break + kill -0 $pidDaemon 2> /dev/null || break sleep 0.1 done - kill -9 $pidDaemon || true + kill -9 $pidDaemon 2> /dev/null || true wait $pidDaemon || true trap "" EXIT } restartDaemon() { - [[ -z "${pidDaemon:-}" ]] && return 0 + [[ -z "${pidDaemon:-}" ]] && return 0 - killDaemon - unset NIX_REMOTE - startDaemon + killDaemon + unset NIX_REMOTE + startDaemon } if [[ $(uname) == Linux ]] && [[ -L /proc/self/ns/user ]] && unshare --user true; then @@ -158,11 +159,12 @@ expect() { local expected res expected="$1" shift - set +e - "$@" - res="$?" - set -e - [[ $res -eq $expected ]] + "$@" || res="$?" + if [[ $res -ne $expected ]]; then + echo "Expected '$expected' but got '$res' while running '$*'" + return 1 + fi + return 0 } needLocalStore() { @@ -173,14 +175,30 @@ needLocalStore() { } # Just to make it easy to find which tests should be fixed -buggyNeedLocalStore () { +buggyNeedLocalStore() { needLocalStore } +enableFeatures() { + local features="$1" + sed -i 's/experimental-features .*/& '"$features"'/' "$NIX_CONF_DIR"/nix.conf +} + set -x if [[ -n "${NIX_DAEMON_PACKAGE:-}" ]]; then startDaemon fi +onError() { + set +x + echo "$0: test failed at:" >&2 + for ((i = 1; i < ${#BASH_SOURCE[@]}; i++)); do + if [[ -z ${BASH_SOURCE[i]} ]]; then break; fi + echo " ${FUNCNAME[i]} in ${BASH_SOURCE[i]}:${BASH_LINENO[i-1]}" >&2 + done +} + +trap onError ERR + fi # COMMON_SH_SOURCED diff --git a/tests/completions.sh b/tests/completions.sh new file mode 100644 index 000000000..522aa1c86 --- /dev/null +++ b/tests/completions.sh @@ -0,0 +1,62 @@ +source common.sh + +cd "$TEST_ROOT" + +mkdir -p dep +cat <<EOF > dep/flake.nix +{ + outputs = i: { }; +} +EOF +mkdir -p foo +cat <<EOF > foo/flake.nix +{ + inputs.a.url = "path:$(realpath dep)"; + + outputs = i: { + sampleOutput = 1; + }; +} +EOF +mkdir -p bar +cat <<EOF > bar/flake.nix +{ + inputs.b.url = "path:$(realpath dep)"; + + outputs = i: { + sampleOutput = 1; + }; +} +EOF + +# Test the completion of a subcommand +[[ "$(NIX_GET_COMPLETIONS=1 nix buil)" == $'normal\nbuild\t' ]] +[[ "$(NIX_GET_COMPLETIONS=2 nix flake metad)" == $'normal\nmetadata\t' ]] + +# Filename completion +[[ "$(NIX_GET_COMPLETIONS=2 nix build ./f)" == $'filenames\n./foo\t' ]] +[[ "$(NIX_GET_COMPLETIONS=2 nix build ./nonexistent)" == $'filenames' ]] + +# Input override completion +[[ "$(NIX_GET_COMPLETIONS=4 nix build ./foo --override-input '')" == $'normal\na\t' ]] +[[ "$(NIX_GET_COMPLETIONS=5 nix flake show ./foo --override-input '')" == $'normal\na\t' ]] +## With multiple input flakes +[[ "$(NIX_GET_COMPLETIONS=5 nix build ./foo ./bar --override-input '')" == $'normal\na\t\nb\t' ]] +## With tilde expansion +[[ "$(HOME=$PWD NIX_GET_COMPLETIONS=4 nix build '~/foo' 
--override-input '')" == $'normal\na\t' ]] +## Out of order +[[ "$(NIX_GET_COMPLETIONS=3 nix build --update-input '' ./foo)" == $'normal\na\t' ]] +[[ "$(NIX_GET_COMPLETIONS=4 nix build ./foo --update-input '' ./bar)" == $'normal\na\t\nb\t' ]] + +# Cli flag completion +NIX_GET_COMPLETIONS=2 nix build --log-form | grep -- "--log-format" + +# Config option completion +## With `--option` +NIX_GET_COMPLETIONS=3 nix build --option allow-import-from | grep -- "allow-import-from-derivation" +## As a cli flag – not working atm +# NIX_GET_COMPLETIONS=2 nix build --allow-import-from | grep -- "allow-import-from-derivation" + +# Attr path completions +[[ "$(NIX_GET_COMPLETIONS=2 nix eval ./foo\#sam)" == $'attrs\n./foo#sampleOutput\t' ]] +[[ "$(NIX_GET_COMPLETIONS=4 nix eval --file ./foo/flake.nix outp)" == $'attrs\noutputs\t' ]] diff --git a/tests/eval.nix b/tests/eval.nix new file mode 100644 index 000000000..befbd17a9 --- /dev/null +++ b/tests/eval.nix @@ -0,0 +1,5 @@ +{ + int = 123; + str = "foo"; + attr.foo = "bar"; +} diff --git a/tests/eval.sh b/tests/eval.sh new file mode 100644 index 000000000..d74976019 --- /dev/null +++ b/tests/eval.sh @@ -0,0 +1,31 @@ +source common.sh + +clearStore + +testStdinHeredoc=$(nix eval -f - <<EOF +{ + bar = 3 + 1; + foo = 2 + 2; +} +EOF +) +[[ $testStdinHeredoc == '{ bar = 4; foo = 4; }' ]] + +nix eval --expr 'assert 1 + 2 == 3; true' + +[[ $(nix eval int -f "./eval.nix") == 123 ]] +[[ $(nix eval str -f "./eval.nix") == '"foo"' ]] +[[ $(nix eval str --raw -f "./eval.nix") == 'foo' ]] +[[ $(nix eval attr -f "./eval.nix") == '{ foo = "bar"; }' ]] +[[ $(nix eval attr --json -f "./eval.nix") == '{"foo":"bar"}' ]] +[[ $(nix eval int -f - < "./eval.nix") == 123 ]] + +# Check if toFile can be utilized during restricted eval +[[ $(nix eval --restrict-eval --expr 'import (builtins.toFile "source" "42")') == 42 ]] + +nix-instantiate --eval -E 'assert 1 + 2 == 3; true' +[[ $(nix-instantiate -A int --eval "./eval.nix") == 123 ]] +[[ $(nix-instantiate -A str --eval "./eval.nix") == '"foo"' ]] +[[ $(nix-instantiate -A attr --eval "./eval.nix") == '{ foo = "bar"; }' ]] +[[ $(nix-instantiate -A attr --eval --json "./eval.nix") == '{"foo":"bar"}' ]] +[[ $(nix-instantiate -A int --eval - < "./eval.nix") == 123 ]] diff --git a/tests/fetchClosure.sh b/tests/fetchClosure.sh new file mode 100644 index 000000000..44050c878 --- /dev/null +++ b/tests/fetchClosure.sh @@ -0,0 +1,70 @@ +source common.sh + +enableFeatures "fetch-closure" +needLocalStore "'--no-require-sigs' can’t be used with the daemon" + +clearStore +clearCacheCache + +# Initialize binary cache. +nonCaPath=$(nix build --json --file ./dependencies.nix --no-link | jq -r .[].outputs.out) +caPath=$(nix store make-content-addressed --json $nonCaPath | jq -r '.rewrites | map(.) | .[]') +nix copy --to file://$cacheDir $nonCaPath + +# Test basic fetchClosure rewriting from non-CA to CA. +clearStore + +[ ! -e $nonCaPath ] +[ ! -e $caPath ] + +[[ $(nix eval -v --raw --expr " + builtins.fetchClosure { + fromStore = \"file://$cacheDir\"; + fromPath = $nonCaPath; + toPath = $caPath; + } +") = $caPath ]] + +[ ! -e $nonCaPath ] +[ -e $caPath ] + +# In impure mode, we can use non-CA paths. +[[ $(nix eval --raw --no-require-sigs --impure --expr " + builtins.fetchClosure { + fromStore = \"file://$cacheDir\"; + fromPath = $nonCaPath; + } +") = $nonCaPath ]] + +[ -e $nonCaPath ] + +# 'toPath' set to empty string should fail but print the expected path. 
+nix eval -v --json --expr " + builtins.fetchClosure { + fromStore = \"file://$cacheDir\"; + fromPath = $nonCaPath; + toPath = \"\"; + } +" 2>&1 | grep "error: rewriting.*$nonCaPath.*yielded.*$caPath" + +# If fromPath is CA, then toPath isn't needed. +nix copy --to file://$cacheDir $caPath + +[[ $(nix eval -v --raw --expr " + builtins.fetchClosure { + fromStore = \"file://$cacheDir\"; + fromPath = $caPath; + } +") = $caPath ]] + +# Check that URL query parameters aren't allowed. +clearStore +narCache=$TEST_ROOT/nar-cache +rm -rf $narCache +(! nix eval -v --raw --expr " + builtins.fetchClosure { + fromStore = \"file://$cacheDir?local-nar-cache=$narCache\"; + fromPath = $caPath; + } +") +(! [ -e $narCache ]) diff --git a/tests/fetchGit.sh b/tests/fetchGit.sh index 89294d8d2..4ceba0293 100644 --- a/tests/fetchGit.sh +++ b/tests/fetchGit.sh @@ -7,11 +7,13 @@ fi clearStore -repo=$TEST_ROOT/git +# Intentionally not in a canonical form +# See https://github.com/NixOS/nix/issues/6195 +repo=$TEST_ROOT/./git export _NIX_FORCE_HTTP=1 -rm -rf $repo ${repo}-tmp $TEST_HOME/.cache/nix $TEST_ROOT/worktree $TEST_ROOT/shallow +rm -rf $repo ${repo}-tmp $TEST_HOME/.cache/nix $TEST_ROOT/worktree $TEST_ROOT/shallow $TEST_ROOT/minimal git init $repo git -C $repo config user.email "foobar@example.com" @@ -22,12 +24,14 @@ touch $repo/.gitignore git -C $repo add hello .gitignore git -C $repo commit -m 'Bla1' rev1=$(git -C $repo rev-parse HEAD) +git -C $repo tag -a tag1 -m tag1 echo world > $repo/hello git -C $repo commit -m 'Bla2' -a git -C $repo worktree add $TEST_ROOT/worktree echo hello >> $TEST_ROOT/worktree/hello rev2=$(git -C $repo rev-parse HEAD) +git -C $repo tag -a tag2 -m tag2 # Fetch a worktree unset _NIX_FORCE_HTTP @@ -147,13 +151,26 @@ path3=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath") # (check dirty-tree handling was used) [[ $(nix eval --impure --raw --expr "(builtins.fetchGit $repo).rev") = 0000000000000000000000000000000000000000 ]] [[ $(nix eval --impure --raw --expr "(builtins.fetchGit $repo).shortRev") = 0000000 ]] +# Making a dirty tree clean again and fetching it should +# record correct revision information. See: #4140 +echo world > $repo/hello +[[ $(nix eval --impure --raw --expr "(builtins.fetchGit $repo).rev") = $rev2 ]] # Committing shouldn't change store path, or switch to using 'master' +echo dev > $repo/hello git -C $repo commit -m 'Bla5' -a path4=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath") [[ $(cat $path4/hello) = dev ]] [[ $path3 = $path4 ]] +# Using remote path with branch other than 'master' should fetch the HEAD revision. +# (--tarball-ttl 0 to prevent using the cached repo above) +export _NIX_FORCE_HTTP=1 +path4=$(nix eval --tarball-ttl 0 --impure --raw --expr "(builtins.fetchGit $repo).outPath") +[[ $(cat $path4/hello) = dev ]] +[[ $path3 = $path4 ]] +unset _NIX_FORCE_HTTP + # Confirm same as 'dev' branch path5=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath") [[ $path3 = $path5 ]] @@ -170,6 +187,14 @@ NIX=$(command -v nix) path5=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath") [[ $path3 = $path5 ]] +# Fetching from a repo with only a specific revision and no branches should +# not fall back to copying files and record correct revision information. 
See: #5302 +mkdir $TEST_ROOT/minimal +git -C $TEST_ROOT/minimal init +git -C $TEST_ROOT/minimal fetch $repo $rev2 +git -C $TEST_ROOT/minimal checkout $rev2 +[[ $(nix eval --impure --raw --expr "(builtins.fetchGit { url = $TEST_ROOT/minimal; }).rev") = $rev2 ]] + # Fetching a shallow repo shouldn't work by default, because we can't # return a revCount. git clone --depth 1 file://$repo $TEST_ROOT/shallow @@ -193,3 +218,21 @@ rev4_nix=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$ # The name argument should be handled path9=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; name = \"foo\"; }).outPath") [[ $path9 =~ -foo$ ]] + +# Specifying a ref without a rev shouldn't pick a cached rev for a different ref +export _NIX_FORCE_HTTP=1 +rev_tag1_nix=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"refs/tags/tag1\"; }).rev") +rev_tag1=$(git -C $repo rev-parse refs/tags/tag1) +[[ $rev_tag1_nix = $rev_tag1 ]] +rev_tag2_nix=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"refs/tags/tag2\"; }).rev") +rev_tag2=$(git -C $repo rev-parse refs/tags/tag2) +[[ $rev_tag2_nix = $rev_tag2 ]] +unset _NIX_FORCE_HTTP + +# should fail if there is no repo +rm -rf $repo/.git +(! nix eval --impure --raw --expr "(builtins.fetchGit \"file://$repo\").outPath") + +# should succeed for a repo without commits +git init $repo +path10=$(nix eval --impure --raw --expr "(builtins.fetchGit \"file://$repo\").outPath") diff --git a/tests/fetchMercurial.sh b/tests/fetchMercurial.sh index 726840664..5c64ffd26 100644 --- a/tests/fetchMercurial.sh +++ b/tests/fetchMercurial.sh @@ -7,7 +7,9 @@ fi clearStore -repo=$TEST_ROOT/hg +# Intentionally not in a canonical form +# See https://github.com/NixOS/nix/issues/6195 +repo=$TEST_ROOT/./hg rm -rf $repo ${repo}-tmp $TEST_HOME/.cache/nix @@ -28,6 +30,12 @@ echo world > $repo/hello hg commit --cwd $repo -m 'Bla2' rev2=$(hg log --cwd $repo -r tip --template '{node}') +# Fetch an unclean branch. +echo unclean > $repo/hello +path=$(nix eval --impure --raw --expr "(builtins.fetchMercurial file://$repo).outPath") +[[ $(cat $path/hello) = unclean ]] +hg revert --cwd $repo --all + # Fetch the default branch. path=$(nix eval --impure --raw --expr "(builtins.fetchMercurial file://$repo).outPath") [[ $(cat $path/hello) = world ]] diff --git a/tests/fetchPath.sh b/tests/fetchPath.sh new file mode 100644 index 000000000..29be38ce2 --- /dev/null +++ b/tests/fetchPath.sh @@ -0,0 +1,6 @@ +source common.sh + +touch $TEST_ROOT/foo -t 202211111111 +# We only check whether 2022-11-1* **:**:** is the last modified date since +# `lastModified` is transformed into UTC in `builtins.fetchTarball`. 
+[[ "$(nix eval --impure --raw --expr "(builtins.fetchTree \"path://$TEST_ROOT/foo\").lastModifiedDate")" =~ 2022111.* ]] diff --git a/tests/fetchTree-file.sh b/tests/fetchTree-file.sh new file mode 100644 index 000000000..f0c530466 --- /dev/null +++ b/tests/fetchTree-file.sh @@ -0,0 +1,105 @@ +source common.sh + +clearStore + +cd "$TEST_ROOT" + +test_fetch_file () { + echo foo > test_input + + input_hash="$(nix hash path test_input)" + + nix eval --impure --file - <<EOF + let + tree = builtins.fetchTree { type = "file"; url = "file://$PWD/test_input"; }; + in + assert (tree.narHash == "$input_hash"); + tree +EOF +} + +# Make sure that `http(s)` and `file` flake inputs are properly extracted when +# they should be, and treated as opaque files when they should be +test_file_flake_input () { + rm -fr "$TEST_ROOT/testFlake"; + mkdir "$TEST_ROOT/testFlake"; + pushd testFlake + + mkdir inputs + echo foo > inputs/test_input_file + tar cfa test_input.tar.gz inputs + cp test_input.tar.gz test_input_no_ext + input_tarball_hash="$(nix hash path test_input.tar.gz)" + input_directory_hash="$(nix hash path inputs)" + + cat <<EOF > flake.nix + { + inputs.no_ext_default_no_unpack = { + url = "file://$PWD/test_input_no_ext"; + flake = false; + }; + inputs.no_ext_explicit_unpack = { + url = "tarball+file://$PWD/test_input_no_ext"; + flake = false; + }; + inputs.tarball_default_unpack = { + url = "file://$PWD/test_input.tar.gz"; + flake = false; + }; + inputs.tarball_explicit_no_unpack = { + url = "file+file://$PWD/test_input.tar.gz"; + flake = false; + }; + outputs = { ... }: {}; + } +EOF + + nix flake update + nix eval --file - <<EOF + with (builtins.fromJSON (builtins.readFile ./flake.lock)); + + # Url inputs whose extension doesn’t match a known archive format should + # not be unpacked by default + assert (nodes.no_ext_default_no_unpack.locked.type == "file"); + assert (nodes.no_ext_default_no_unpack.locked.unpack or false == false); + assert (nodes.no_ext_default_no_unpack.locked.narHash == "$input_tarball_hash"); + + # For backwards compatibility, flake inputs that correspond to the + # old 'tarball' fetcher should still have their type set to 'tarball' + assert (nodes.tarball_default_unpack.locked.type == "tarball"); + # Unless explicitely specified, the 'unpack' parameter shouldn’t appear here + # because that would break older Nix versions + assert (!nodes.tarball_default_unpack.locked ? 
unpack); + assert (nodes.tarball_default_unpack.locked.narHash == "$input_directory_hash"); + + # Explicitly passing the unpack parameter should enforce the desired behavior + assert (nodes.no_ext_explicit_unpack.locked.narHash == nodes.tarball_default_unpack.locked.narHash); + assert (nodes.tarball_explicit_no_unpack.locked.narHash == nodes.no_ext_default_no_unpack.locked.narHash); + true +EOF + popd + + [[ -z "${NIX_DAEMON_PACKAGE}" ]] && return 0 + + # Ensure that a lockfile generated by the current Nix for tarball inputs + # can still be read by an older Nix + + cat <<EOF > flake.nix + { + inputs.tarball = { + url = "file://$PWD/test_input.tar.gz"; + flake = false; + }; + outputs = { self, tarball }: { + foo = builtins.readFile "${tarball}/test_input_file"; + }; + } + nix flake update + + clearStore + + "$NIX_DAEMON_PACKAGE/bin/nix" eval .#foo +EOF +} + +test_fetch_file +test_file_flake_input diff --git a/tests/fetchurl.sh b/tests/fetchurl.sh index 3d1685f43..b41d8c4b7 100644 --- a/tests/fetchurl.sh +++ b/tests/fetchurl.sh @@ -9,6 +9,10 @@ outPath=$(nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file: cmp $outPath fetchurl.sh +# Do not re-fetch paths already present. +outPath2=$(nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file:///does-not-exist/must-remain-unused/fetchurl.sh --argstr sha256 $hash --no-out-link) +test "$outPath" == "$outPath2" + # Now using a base-64 hash. clearStore diff --git a/tests/flake-bundler.sh b/tests/flakes/bundle.sh index 9496b8f92..67bbb05ac 100644 --- a/tests/flake-bundler.sh +++ b/tests/flakes/bundle.sh @@ -1,9 +1,6 @@ source common.sh -clearStore -rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local - -cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME +cp ../simple.nix ../simple.builder.sh ../config.nix $TEST_HOME cd $TEST_HOME @@ -25,6 +22,7 @@ cat <<EOF > flake.nix }; } EOF + nix build .# nix bundle --bundler .# .# nix bundle --bundler .#bundlers.$system.default .#packages.$system.default @@ -32,6 +30,3 @@ nix bundle --bundler .#bundlers.$system.simple .#packages.$system.default nix bundle --bundler .#bundlers.$system.default .#apps.$system.default nix bundle --bundler .#bundlers.$system.simple .#apps.$system.default - -clearStore - diff --git a/tests/flakes/check.sh b/tests/flakes/check.sh new file mode 100644 index 000000000..f572aa75c --- /dev/null +++ b/tests/flakes/check.sh @@ -0,0 +1,89 @@ +source common.sh + +flakeDir=$TEST_ROOT/flake3 +mkdir -p $flakeDir + +cat > $flakeDir/flake.nix <<EOF +{ + outputs = { self }: { + overlay = final: prev: { + }; + }; +} +EOF + +nix flake check $flakeDir + +cat > $flakeDir/flake.nix <<EOF +{ + outputs = { self }: { + overlay = finalll: prev: { + }; + }; +} +EOF + +(! nix flake check $flakeDir) + +cat > $flakeDir/flake.nix <<EOF +{ + outputs = { self }: { + nixosModules.foo = { + a.b.c = 123; + foo = true; + }; + }; +} +EOF + +nix flake check $flakeDir + +cat > $flakeDir/flake.nix <<EOF +{ + outputs = { self }: { + nixosModules.foo = { + a.b.c = 123; + foo = assert false; true; + }; + }; +} +EOF + +(! nix flake check $flakeDir) + +cat > $flakeDir/flake.nix <<EOF +{ + outputs = { self }: { + nixosModule = { config, pkgs, ... }: { + a.b.c = 123; + }; + }; +} +EOF + +nix flake check $flakeDir + +cat > $flakeDir/flake.nix <<EOF +{ + outputs = { self }: { + nixosModule = { config, pkgs }: { + a.b.c = 123; + }; + }; +} +EOF + +(! 
nix flake check $flakeDir) + +cat > $flakeDir/flake.nix <<EOF +{ + outputs = { self }: { + packages.system-1.default = "foo"; + packages.system-2.default = "bar"; + }; +} +EOF + +checkRes=$(nix flake check --keep-going $flakeDir 2>&1 && fail "nix flake check should have failed" || true) +echo "$checkRes" | grep -q "packages.system-1.default" +echo "$checkRes" | grep -q "packages.system-2.default" diff --git a/tests/flakes/circular.sh b/tests/flakes/circular.sh new file mode 100644 index 000000000..09cd02edf --- /dev/null +++ b/tests/flakes/circular.sh @@ -0,0 +1,49 @@ +# Test circular flake dependencies. +source ./common.sh + +requireGit + +flakeA=$TEST_ROOT/flakeA +flakeB=$TEST_ROOT/flakeB + +createGitRepo $flakeA +createGitRepo $flakeB + +cat > $flakeA/flake.nix <<EOF +{ + inputs.b.url = git+file://$flakeB; + inputs.b.inputs.a.follows = "/"; + + outputs = { self, b }: { + foo = 123 + b.bar; + xyzzy = 1000; + }; +} +EOF + +git -C $flakeA add flake.nix + +cat > $flakeB/flake.nix <<EOF +{ + inputs.a.url = git+file://$flakeA; + + outputs = { self, a }: { + bar = 456 + a.xyzzy; + }; +} +EOF + +git -C $flakeB add flake.nix +git -C $flakeB commit -a -m 'Foo' + +[[ $(nix eval $flakeA#foo) = 1579 ]] +[[ $(nix eval $flakeA#foo) = 1579 ]] + +sed -i $flakeB/flake.nix -e 's/456/789/' +git -C $flakeB commit -a -m 'Foo' + +[[ $(nix eval --update-input b $flakeA#foo) = 1912 ]] + +# Test list-inputs with circular dependencies +nix flake metadata $flakeA + diff --git a/tests/flakes/common.sh b/tests/flakes/common.sh new file mode 100644 index 000000000..c333733c2 --- /dev/null +++ b/tests/flakes/common.sh @@ -0,0 +1,73 @@ +source ../common.sh + +registry=$TEST_ROOT/registry.json + +requireGit() { + if [[ -z $(type -p git) ]]; then + echo "Git not installed; skipping flake tests" + exit 99 + fi +} + +writeSimpleFlake() { + local flakeDir="$1" + cat > $flakeDir/flake.nix <<EOF +{ + description = "Bla bla"; + + outputs = inputs: rec { + packages.$system = rec { + foo = import ./simple.nix; + default = foo; + }; + + # To test "nix flake init". 
+ legacyPackages.x86_64-linux.hello = import ./simple.nix; + }; +} +EOF + + cp ../simple.nix ../simple.builder.sh ../config.nix $flakeDir/ +} + +createSimpleGitFlake() { + local flakeDir="$1" + writeSimpleFlake $flakeDir + git -C $flakeDir add flake.nix simple.nix simple.builder.sh config.nix + git -C $flakeDir commit -m 'Initial' +} + +writeDependentFlake() { + local flakeDir="$1" + cat > $flakeDir/flake.nix <<EOF +{ + outputs = { self, flake1 }: { + packages.$system.default = flake1.packages.$system.default; + expr = assert builtins.pathExists ./flake.lock; 123; + }; +} +EOF +} + +writeTrivialFlake() { + local flakeDir="$1" + cat > $flakeDir/flake.nix <<EOF +{ + outputs = { self }: { + expr = 123; + }; +} +EOF +} + +createGitRepo() { + local repo="$1" + local extraArgs="$2" + + rm -rf $repo $repo.tmp + mkdir -p $repo + + git -C $repo init $extraArgs + git -C $repo config user.email "foobar@example.com" + git -C $repo config user.name "Foobar" +} diff --git a/tests/flake-local-settings.sh b/tests/flakes/config.sh index e92c16f87..d1941a6be 100644 --- a/tests/flake-local-settings.sh +++ b/tests/flakes/config.sh @@ -1,9 +1,6 @@ source common.sh -clearStore -rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local - -cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME +cp ../simple.nix ../simple.builder.sh ../config.nix $TEST_HOME cd $TEST_HOME diff --git a/tests/flakes.sh b/tests/flakes/flakes.sh index ea629ae70..267e2cd6f 100644 --- a/tests/flakes.sh +++ b/tests/flakes/flakes.sh @@ -1,60 +1,30 @@ -source common.sh +source ./common.sh -if [[ -z $(type -p git) ]]; then - echo "Git not installed; skipping flake tests" - exit 99 -fi +requireGit clearStore rm -rf $TEST_HOME/.cache $TEST_HOME/.config -registry=$TEST_ROOT/registry.json - flake1Dir=$TEST_ROOT/flake1 flake2Dir=$TEST_ROOT/flake2 flake3Dir=$TEST_ROOT/flake3 flake5Dir=$TEST_ROOT/flake5 -flake6Dir=$TEST_ROOT/flake6 flake7Dir=$TEST_ROOT/flake7 -templatesDir=$TEST_ROOT/templates nonFlakeDir=$TEST_ROOT/nonFlake badFlakeDir=$TEST_ROOT/badFlake -flakeA=$TEST_ROOT/flakeA -flakeB=$TEST_ROOT/flakeB flakeGitBare=$TEST_ROOT/flakeGitBare -flakeFollowsA=$TEST_ROOT/follows/flakeA -flakeFollowsB=$TEST_ROOT/follows/flakeA/flakeB -flakeFollowsC=$TEST_ROOT/follows/flakeA/flakeB/flakeC -flakeFollowsD=$TEST_ROOT/follows/flakeA/flakeD -flakeFollowsE=$TEST_ROOT/follows/flakeA/flakeE - -for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $templatesDir $nonFlakeDir $flakeA $flakeB $flakeFollowsA; do - rm -rf $repo $repo.tmp - mkdir -p $repo - git -C $repo init - git -C $repo config user.email "foobar@example.com" - git -C $repo config user.name "Foobar" -done - -cat > $flake1Dir/flake.nix <<EOF -{ - description = "Bla bla"; - outputs = inputs: rec { - packages.$system = rec { - foo = import ./simple.nix; - default = foo; - }; +for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $nonFlakeDir; do + # Give one repo a non-main initial branch. + extraArgs= + if [[ $repo == $flake2Dir ]]; then + extraArgs="--initial-branch=main" + fi - # To test "nix flake init". 
- legacyPackages.x86_64-linux.hello = import ./simple.nix; - }; -} -EOF + createGitRepo "$repo" "$extraArgs" +done -cp ./simple.nix ./simple.builder.sh ./config.nix $flake1Dir/ -git -C $flake1Dir add flake.nix simple.nix simple.builder.sh config.nix -git -C $flake1Dir commit -m 'Initial' +createSimpleGitFlake $flake1Dir cat > $flake2Dir/flake.nix <<EOF { @@ -98,12 +68,10 @@ nix registry add --registry $registry flake1 git+file://$flake1Dir nix registry add --registry $registry flake2 git+file://$flake2Dir nix registry add --registry $registry flake3 git+file://$flake3Dir nix registry add --registry $registry flake4 flake3 -nix registry add --registry $registry flake5 hg+file://$flake5Dir nix registry add --registry $registry nixpkgs flake1 -nix registry add --registry $registry templates git+file://$templatesDir # Test 'nix flake list'. -[[ $(nix registry list | wc -l) == 7 ]] +[[ $(nix registry list | wc -l) == 5 ]] # Test 'nix flake metadata'. nix flake metadata flake1 @@ -156,6 +124,7 @@ nix build -o $TEST_ROOT/result --expr "(builtins.getFlake \"git+file://$flake1Di # But should succeed in impure mode. (! nix build -o $TEST_ROOT/result flake2#bar --impure) nix build -o $TEST_ROOT/result flake2#bar --impure --no-write-lock-file +nix eval --expr "builtins.getFlake \"$flake2Dir\"" --impure # Building a local flake with an unlocked dependency should fail with --no-update-lock-file. nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes' @@ -165,11 +134,11 @@ nix build -o $TEST_ROOT/result $flake2Dir#bar --no-write-lock-file nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes' nix build -o $TEST_ROOT/result $flake2Dir#bar --commit-lock-file [[ -e $flake2Dir/flake.lock ]] -[[ -z $(git -C $flake2Dir diff master) ]] +[[ -z $(git -C $flake2Dir diff main || echo failed) ]] # Rerunning the build should not change the lockfile. nix build -o $TEST_ROOT/result $flake2Dir#bar -[[ -z $(git -C $flake2Dir diff master) ]] +[[ -z $(git -C $flake2Dir diff main || echo failed) ]] # Building with a lockfile should not require a fetch of the registry. nix build -o $TEST_ROOT/result --flake-registry file:///no-registry.json $flake2Dir#bar --refresh @@ -178,7 +147,7 @@ nix build -o $TEST_ROOT/result --no-use-registries $flake2Dir#bar --refresh # Updating the flake should not change the lockfile. nix flake lock $flake2Dir -[[ -z $(git -C $flake2Dir diff master) ]] +[[ -z $(git -C $flake2Dir diff main || echo failed) ]] # Now we should be able to build the flake in pure mode. nix build -o $TEST_ROOT/result flake2#bar @@ -213,7 +182,7 @@ nix build -o $TEST_ROOT/result $flake3Dir#"sth sth" nix build -o $TEST_ROOT/result $flake3Dir#"sth%20sth" # Check whether it saved the lockfile -(! [[ -z $(git -C $flake3Dir diff master) ]]) +[[ -n $(git -C $flake3Dir diff master) ]] git -C $flake3Dir add flake.lock @@ -283,7 +252,7 @@ cat > $flake3Dir/flake.nix <<EOF } EOF -cp ./config.nix $flake3Dir +cp ../config.nix $flake3Dir git -C $flake3Dir add flake.nix config.nix git -C $flake3Dir commit -m 'Add nonFlakeInputs' @@ -313,10 +282,10 @@ nix build -o $TEST_ROOT/result flake4#xyzzy # Test 'nix flake update' and --override-flake. nix flake lock $flake3Dir -[[ -z $(git -C $flake3Dir diff master) ]] +[[ -z $(git -C $flake3Dir diff master || echo failed) ]] nix flake update $flake3Dir --override-flake flake2 nixpkgs -[[ ! -z $(git -C $flake3Dir diff master) ]] +[[ ! 
-z $(git -C $flake3Dir diff master || echo failed) ]] # Make branch "removeXyzzy" where flake3 doesn't have xyzzy anymore git -C $flake3Dir checkout -b removeXyzzy @@ -358,158 +327,19 @@ nix build -o $TEST_ROOT/result flake4/removeXyzzy#sth # Testing the nix CLI nix registry add flake1 flake3 -[[ $(nix registry list | wc -l) == 8 ]] +[[ $(nix registry list | wc -l) == 6 ]] nix registry pin flake1 -[[ $(nix registry list | wc -l) == 8 ]] +[[ $(nix registry list | wc -l) == 6 ]] nix registry pin flake1 flake3 -[[ $(nix registry list | wc -l) == 8 ]] +[[ $(nix registry list | wc -l) == 6 ]] nix registry remove flake1 -[[ $(nix registry list | wc -l) == 7 ]] - -# Test 'nix flake init'. -cat > $templatesDir/flake.nix <<EOF -{ - description = "Some templates"; - - outputs = { self }: { - templates = rec { - trivial = { - path = ./trivial; - description = "A trivial flake"; - }; - default = trivial; - }; - }; -} -EOF - -mkdir $templatesDir/trivial - -cat > $templatesDir/trivial/flake.nix <<EOF -{ - description = "A flake for building Hello World"; - - outputs = { self, nixpkgs }: { - packages.x86_64-linux = rec { - hello = nixpkgs.legacyPackages.x86_64-linux.hello; - default = hello; - }; - }; -} -EOF - -git -C $templatesDir add flake.nix trivial/flake.nix -git -C $templatesDir commit -m 'Initial' - -nix flake check templates -nix flake show templates -nix flake show templates --json | jq - -(cd $flake7Dir && nix flake init) -(cd $flake7Dir && nix flake init) # check idempotence -git -C $flake7Dir add flake.nix -nix flake check $flake7Dir -nix flake show $flake7Dir -nix flake show $flake7Dir --json | jq -git -C $flake7Dir commit -a -m 'Initial' - -# Test 'nix flake new'. -rm -rf $flake6Dir -nix flake new -t templates#trivial $flake6Dir -nix flake new -t templates#trivial $flake6Dir # check idempotence -nix flake check $flake6Dir +[[ $(nix registry list | wc -l) == 5 ]] # Test 'nix flake clone'. rm -rf $TEST_ROOT/flake1-v2 nix flake clone flake1 --dest $TEST_ROOT/flake1-v2 [ -e $TEST_ROOT/flake1-v2/flake.nix ] -# More 'nix flake check' tests. -cat > $flake3Dir/flake.nix <<EOF -{ - outputs = { flake1, self }: { - overlay = final: prev: { - }; - }; -} -EOF - -nix flake check $flake3Dir - -cat > $flake3Dir/flake.nix <<EOF -{ - outputs = { flake1, self }: { - overlay = finalll: prev: { - }; - }; -} -EOF - -(! nix flake check $flake3Dir) - -cat > $flake3Dir/flake.nix <<EOF -{ - outputs = { flake1, self }: { - nixosModules.foo = { - a.b.c = 123; - foo = true; - }; - }; -} -EOF - -nix flake check $flake3Dir - -cat > $flake3Dir/flake.nix <<EOF -{ - outputs = { flake1, self }: { - nixosModules.foo = { - a.b.c = 123; - foo = assert false; true; - }; - }; -} -EOF - -(! nix flake check $flake3Dir) - -cat > $flake3Dir/flake.nix <<EOF -{ - outputs = { flake1, self }: { - nixosModule = { config, pkgs, ... }: { - a.b.c = 123; - }; - }; -} -EOF - -nix flake check $flake3Dir - -cat > $flake3Dir/flake.nix <<EOF -{ - outputs = { flake1, self }: { - nixosModule = { config, pkgs }: { - a.b.c = 123; - }; - }; -} -EOF - -(! nix flake check $flake3Dir) - -cat > $flake3Dir/flake.nix <<EOF -{ - outputs = { flake1, self }: { - packages.system-1.default = "foo"; - packages.system-2.default = "bar"; - }; -} -EOF - -checkRes=$(nix flake check --keep-going $flake3Dir 2>&1 && fail "nix flake check should have failed" || true) -echo "$checkRes" | grep -q "packages.system-1.default" -echo "$checkRes" | grep -q "packages.system-2.default" - # Test 'follows' inputs. 
cat > $flake3Dir/flake.nix <<EOF { @@ -552,6 +382,10 @@ nix flake lock $flake3Dir [[ $(jq -c .nodes.root.inputs.bar $flake3Dir/flake.lock) = '["flake2"]' ]] # Test overriding inputs of inputs. +writeTrivialFlake $flake7Dir +git -C $flake7Dir add flake.nix +git -C $flake7Dir commit -m 'Initial' + cat > $flake3Dir/flake.nix <<EOF { inputs.flake2.inputs.flake1 = { @@ -586,50 +420,9 @@ rm -rf $flakeGitBare git clone --bare $flake1Dir $flakeGitBare nix build -o $TEST_ROOT/result git+file://$flakeGitBare -# Test Mercurial flakes. -rm -rf $flake5Dir -mkdir $flake5Dir - -cat > $flake5Dir/flake.nix <<EOF -{ - outputs = { self, flake1 }: { - packages.$system.default = flake1.packages.$system.default; - expr = assert builtins.pathExists ./flake.lock; 123; - }; -} -EOF - -if [[ -n $(type -p hg) ]]; then - hg init $flake5Dir - - hg add $flake5Dir/flake.nix - hg commit --config ui.username=foobar@example.org $flake5Dir -m 'Initial commit' - - nix build -o $TEST_ROOT/result hg+file://$flake5Dir - [[ -e $TEST_ROOT/result/hello ]] - - (! nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revision) - - nix eval hg+file://$flake5Dir#expr - - nix eval hg+file://$flake5Dir#expr - - (! nix eval hg+file://$flake5Dir#expr --no-allow-dirty) - - (! nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revision) - - hg commit --config ui.username=foobar@example.org $flake5Dir -m 'Add lock file' - - nix flake metadata --json hg+file://$flake5Dir --refresh | jq -e -r .revision - nix flake metadata --json hg+file://$flake5Dir - [[ $(nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revCount) = 1 ]] - - nix build -o $TEST_ROOT/result hg+file://$flake5Dir --no-registries --no-allow-dirty - nix build -o $TEST_ROOT/result hg+file://$flake5Dir --no-use-registries --no-allow-dirty -fi - # Test path flakes. -rm -rf $flake5Dir/.hg $flake5Dir/flake.lock +mkdir -p $flake5Dir +writeDependentFlake $flake5Dir nix flake lock path://$flake5Dir # Test tarball flakes. @@ -667,165 +460,6 @@ nix flake lock $flake3Dir --update-input flake2/flake1 # Test 'nix flake metadata --json'. nix flake metadata $flake3Dir --json | jq . -# Test circular flake dependencies. -cat > $flakeA/flake.nix <<EOF -{ - inputs.b.url = git+file://$flakeB; - inputs.b.inputs.a.follows = "/"; - - outputs = { self, nixpkgs, b }: { - foo = 123 + b.bar; - xyzzy = 1000; - }; -} -EOF - -git -C $flakeA add flake.nix - -cat > $flakeB/flake.nix <<EOF -{ - inputs.a.url = git+file://$flakeA; - - outputs = { self, nixpkgs, a }: { - bar = 456 + a.xyzzy; - }; -} -EOF - -git -C $flakeB add flake.nix -git -C $flakeB commit -a -m 'Foo' - -[[ $(nix eval $flakeA#foo) = 1579 ]] -[[ $(nix eval $flakeA#foo) = 1579 ]] - -sed -i $flakeB/flake.nix -e 's/456/789/' -git -C $flakeB commit -a -m 'Foo' - -[[ $(nix eval --update-input b $flakeA#foo) = 1912 ]] - -# Test list-inputs with circular dependencies -nix flake metadata $flakeA - -# Test flake follow paths -mkdir -p $flakeFollowsB -mkdir -p $flakeFollowsC -mkdir -p $flakeFollowsD -mkdir -p $flakeFollowsE - -cat > $flakeFollowsA/flake.nix <<EOF -{ - description = "Flake A"; - inputs = { - B = { - url = "path:./flakeB"; - inputs.foobar.follows = "foobar"; - }; - - foobar.url = "path:$flakeFollowsA/flakeE"; - }; - outputs = { ... }: {}; -} -EOF - -cat > $flakeFollowsB/flake.nix <<EOF -{ - description = "Flake B"; - inputs = { - foobar.url = "path:$flakeFollowsA/flakeE"; - goodoo.follows = "C/goodoo"; - C = { - url = "path:./flakeC"; - inputs.foobar.follows = "foobar"; - }; - }; - outputs = { ... 
}: {}; -} -EOF - -cat > $flakeFollowsC/flake.nix <<EOF -{ - description = "Flake C"; - inputs = { - foobar.url = "path:$flakeFollowsA/flakeE"; - goodoo.follows = "foobar"; - }; - outputs = { ... }: {}; -} -EOF - -cat > $flakeFollowsD/flake.nix <<EOF -{ - description = "Flake D"; - inputs = {}; - outputs = { ... }: {}; -} -EOF - -cat > $flakeFollowsE/flake.nix <<EOF -{ - description = "Flake E"; - inputs = {}; - outputs = { ... }: {}; -} -EOF - -git -C $flakeFollowsA add flake.nix flakeB/flake.nix \ - flakeB/flakeC/flake.nix flakeD/flake.nix flakeE/flake.nix - -nix flake metadata $flakeFollowsA - -nix flake update $flakeFollowsA - -oldLock="$(cat "$flakeFollowsA/flake.lock")" - -# Ensure that locking twice doesn't change anything - -nix flake lock $flakeFollowsA - -newLock="$(cat "$flakeFollowsA/flake.lock")" - -diff <(echo "$newLock") <(echo "$oldLock") - -[[ $(jq -c .nodes.B.inputs.C $flakeFollowsA/flake.lock) = '"C"' ]] -[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '["foobar"]' ]] -[[ $(jq -c .nodes.C.inputs.foobar $flakeFollowsA/flake.lock) = '["B","foobar"]' ]] - -# Ensure removing follows from flake.nix removes them from the lockfile - -cat > $flakeFollowsA/flake.nix <<EOF -{ - description = "Flake A"; - inputs = { - B = { - url = "path:./flakeB"; - inputs.nonFlake.follows = "D"; - }; - D.url = "path:./flakeD"; - }; - outputs = { ... }: {}; -} -EOF - -nix flake lock $flakeFollowsA - -[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '"foobar"' ]] -jq -r -c '.nodes | keys | .[]' $flakeFollowsA/flake.lock | grep "^foobar$" - -# Ensure a relative path is not allowed to go outside the store path -cat > $flakeFollowsA/flake.nix <<EOF -{ - description = "Flake A"; - inputs = { - B.url = "path:../flakeB"; - }; - outputs = { ... }: {}; -} -EOF - -git -C $flakeFollowsA add flake.nix - -nix flake lock $flakeFollowsA 2>&1 | grep 'points outside' - # Test flake in store does not evaluate rm -rf $badFlakeDir mkdir $badFlakeDir diff --git a/tests/flakes/follow-paths.sh b/tests/flakes/follow-paths.sh new file mode 100644 index 000000000..19cc1bafa --- /dev/null +++ b/tests/flakes/follow-paths.sh @@ -0,0 +1,150 @@ +source ./common.sh + +requireGit + +flakeFollowsA=$TEST_ROOT/follows/flakeA +flakeFollowsB=$TEST_ROOT/follows/flakeA/flakeB +flakeFollowsC=$TEST_ROOT/follows/flakeA/flakeB/flakeC +flakeFollowsD=$TEST_ROOT/follows/flakeA/flakeD +flakeFollowsE=$TEST_ROOT/follows/flakeA/flakeE + +# Test following path flakerefs. +createGitRepo $flakeFollowsA +mkdir -p $flakeFollowsB +mkdir -p $flakeFollowsC +mkdir -p $flakeFollowsD +mkdir -p $flakeFollowsE + +cat > $flakeFollowsA/flake.nix <<EOF +{ + description = "Flake A"; + inputs = { + B = { + url = "path:./flakeB"; + inputs.foobar.follows = "foobar"; + }; + + foobar.url = "path:$flakeFollowsA/flakeE"; + }; + outputs = { ... }: {}; +} +EOF + +cat > $flakeFollowsB/flake.nix <<EOF +{ + description = "Flake B"; + inputs = { + foobar.url = "path:$flakeFollowsA/flakeE"; + goodoo.follows = "C/goodoo"; + C = { + url = "path:./flakeC"; + inputs.foobar.follows = "foobar"; + }; + }; + outputs = { ... }: {}; +} +EOF + +cat > $flakeFollowsC/flake.nix <<EOF +{ + description = "Flake C"; + inputs = { + foobar.url = "path:$flakeFollowsA/flakeE"; + goodoo.follows = "foobar"; + }; + outputs = { ... }: {}; +} +EOF + +cat > $flakeFollowsD/flake.nix <<EOF +{ + description = "Flake D"; + inputs = {}; + outputs = { ... }: {}; +} +EOF + +cat > $flakeFollowsE/flake.nix <<EOF +{ + description = "Flake E"; + inputs = {}; + outputs = { ... 
}: {}; +} +EOF + +git -C $flakeFollowsA add flake.nix flakeB/flake.nix \ + flakeB/flakeC/flake.nix flakeD/flake.nix flakeE/flake.nix + +nix flake metadata $flakeFollowsA + +nix flake update $flakeFollowsA + +nix flake lock $flakeFollowsA + +oldLock="$(cat "$flakeFollowsA/flake.lock")" + +# Ensure that locking twice doesn't change anything + +nix flake lock $flakeFollowsA + +newLock="$(cat "$flakeFollowsA/flake.lock")" + +diff <(echo "$newLock") <(echo "$oldLock") + +[[ $(jq -c .nodes.B.inputs.C $flakeFollowsA/flake.lock) = '"C"' ]] +[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '["foobar"]' ]] +[[ $(jq -c .nodes.C.inputs.foobar $flakeFollowsA/flake.lock) = '["B","foobar"]' ]] + +# Ensure removing follows from flake.nix removes them from the lockfile + +cat > $flakeFollowsA/flake.nix <<EOF +{ + description = "Flake A"; + inputs = { + B = { + url = "path:./flakeB"; + }; + D.url = "path:./flakeD"; + }; + outputs = { ... }: {}; +} +EOF + +nix flake lock $flakeFollowsA + +[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '"foobar"' ]] +jq -r -c '.nodes | keys | .[]' $flakeFollowsA/flake.lock | grep "^foobar$" + +# Ensure a relative path is not allowed to go outside the store path +cat > $flakeFollowsA/flake.nix <<EOF +{ + description = "Flake A"; + inputs = { + B.url = "path:../flakeB"; + }; + outputs = { ... }: {}; +} +EOF + +git -C $flakeFollowsA add flake.nix + +nix flake lock $flakeFollowsA 2>&1 | grep 'points outside' + +# Non-existent follows should print a warning. +cat >$flakeFollowsA/flake.nix <<EOF +{ + description = "Flake A"; + inputs.B = { + url = "path:./flakeB"; + inputs.invalid.follows = "D"; + inputs.invalid2.url = "path:./flakeD"; + }; + inputs.D.url = "path:./flakeD"; + outputs = { ... }: {}; +} +EOF + +git -C $flakeFollowsA add flake.nix + +nix flake lock $flakeFollowsA 2>&1 | grep "warning: input 'B' has an override for a non-existent input 'invalid'" +nix flake lock $flakeFollowsA 2>&1 | grep "warning: input 'B' has an override for a non-existent input 'invalid2'" diff --git a/tests/flakes/init.sh b/tests/flakes/init.sh new file mode 100644 index 000000000..36cb9956a --- /dev/null +++ b/tests/flakes/init.sh @@ -0,0 +1,87 @@ +source ./common.sh + +requireGit + +templatesDir=$TEST_ROOT/templates +flakeDir=$TEST_ROOT/flake +nixpkgsDir=$TEST_ROOT/nixpkgs + +nix registry add --registry $registry templates git+file://$templatesDir +nix registry add --registry $registry nixpkgs git+file://$nixpkgsDir + +createGitRepo $nixpkgsDir +createSimpleGitFlake $nixpkgsDir + +# Test 'nix flake init'. 
+createGitRepo $templatesDir + +cat > $templatesDir/flake.nix <<EOF +{ + description = "Some templates"; + + outputs = { self }: { + templates = rec { + trivial = { + path = ./trivial; + description = "A trivial flake"; + welcomeText = '' + Welcome to my trivial flake + ''; + }; + default = trivial; + }; + }; +} +EOF + +mkdir $templatesDir/trivial + +cat > $templatesDir/trivial/flake.nix <<EOF +{ + description = "A flake for building Hello World"; + + outputs = { self, nixpkgs }: { + packages.x86_64-linux = rec { + hello = nixpkgs.legacyPackages.x86_64-linux.hello; + default = hello; + }; + }; +} +EOF +echo a > $templatesDir/trivial/a +echo b > $templatesDir/trivial/b + +git -C $templatesDir add flake.nix trivial/ +git -C $templatesDir commit -m 'Initial' + +nix flake check templates +nix flake show templates +nix flake show templates --json | jq + +createGitRepo $flakeDir +(cd $flakeDir && nix flake init) +(cd $flakeDir && nix flake init) # check idempotence +git -C $flakeDir add flake.nix +nix flake check $flakeDir +nix flake show $flakeDir +nix flake show $flakeDir --json | jq +git -C $flakeDir commit -a -m 'Initial' + +# Test 'nix flake init' with benign conflicts +createGitRepo "$flakeDir" +echo a > $flakeDir/a +(cd $flakeDir && nix flake init) # check idempotence + +# Test 'nix flake init' with conflicts +createGitRepo "$flakeDir" +echo b > $flakeDir/a +pushd $flakeDir +(! nix flake init) |& grep "refusing to overwrite existing file '$flakeDir/a'" +popd +git -C $flakeDir commit -a -m 'Changed' + +# Test 'nix flake new'. +rm -rf $flakeDir +nix flake new -t templates#trivial $flakeDir +nix flake new -t templates#trivial $flakeDir # check idempotence +nix flake check $flakeDir diff --git a/tests/flakes/mercurial.sh b/tests/flakes/mercurial.sh new file mode 100644 index 000000000..2614006c8 --- /dev/null +++ b/tests/flakes/mercurial.sh @@ -0,0 +1,46 @@ +source ./common.sh + +if [[ -z $(type -p hg) ]]; then + echo "Mercurial not installed; skipping" + exit 99 +fi + +flake1Dir=$TEST_ROOT/flake-hg1 +mkdir -p $flake1Dir +writeSimpleFlake $flake1Dir +hg init $flake1Dir + +nix registry add --registry $registry flake1 hg+file://$flake1Dir + +flake2Dir=$TEST_ROOT/flake-hg2 +mkdir -p $flake2Dir +writeDependentFlake $flake2Dir +hg init $flake2Dir + +hg add $flake1Dir/* +hg commit --config ui.username=foobar@example.org $flake1Dir -m 'Initial commit' + +hg add $flake2Dir/flake.nix +hg commit --config ui.username=foobar@example.org $flake2Dir -m 'Initial commit' + +nix build -o $TEST_ROOT/result hg+file://$flake2Dir +[[ -e $TEST_ROOT/result/hello ]] + +(! nix flake metadata --json hg+file://$flake2Dir | jq -e -r .revision) + +nix eval hg+file://$flake2Dir#expr + +nix eval hg+file://$flake2Dir#expr + +(! nix eval hg+file://$flake2Dir#expr --no-allow-dirty) + +(! 
nix flake metadata --json hg+file://$flake2Dir | jq -e -r .revision) + +hg commit --config ui.username=foobar@example.org $flake2Dir -m 'Add lock file' + +nix flake metadata --json hg+file://$flake2Dir --refresh | jq -e -r .revision +nix flake metadata --json hg+file://$flake2Dir +[[ $(nix flake metadata --json hg+file://$flake2Dir | jq -e -r .revCount) = 1 ]] + +nix build -o $TEST_ROOT/result hg+file://$flake2Dir --no-registries --no-allow-dirty +nix build -o $TEST_ROOT/result hg+file://$flake2Dir --no-use-registries --no-allow-dirty diff --git a/tests/flakes/run.sh b/tests/flakes/run.sh new file mode 100644 index 000000000..9fa51d1c7 --- /dev/null +++ b/tests/flakes/run.sh @@ -0,0 +1,29 @@ +source ../common.sh + +clearStore +rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local +cp ../shell-hello.nix ../config.nix $TEST_HOME +cd $TEST_HOME + +cat <<EOF > flake.nix +{ + outputs = {self}: { + packages.$system.pkgAsPkg = (import ./shell-hello.nix).hello; + packages.$system.appAsApp = self.packages.$system.appAsApp; + + apps.$system.pkgAsApp = self.packages.$system.pkgAsPkg; + apps.$system.appAsApp = { + type = "app"; + program = "\${(import ./shell-hello.nix).hello}/bin/hello"; + }; + }; +} +EOF +nix run --no-write-lock-file .#appAsApp +nix run --no-write-lock-file .#pkgAsPkg + +! nix run --no-write-lock-file .#pkgAsApp || fail "'nix run' shouldn’t accept an 'app' defined under 'packages'" +! nix run --no-write-lock-file .#appAsPkg || fail "elements of 'apps' should be of type 'app'" + +clearStore + diff --git a/tests/flake-searching.sh b/tests/flakes/search-root.sh index db241f6d2..d8586dc8a 100644 --- a/tests/flake-searching.sh +++ b/tests/flakes/search-root.sh @@ -1,15 +1,11 @@ source common.sh -if [[ -z $(type -p git) ]]; then - echo "Git not installed; skipping flake search tests" - exit 99 -fi - clearStore -cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME +writeSimpleFlake $TEST_HOME cd $TEST_HOME mkdir -p foo/subdir + echo '{ outputs = _: {}; }' > foo/flake.nix cat <<EOF > flake.nix { @@ -43,10 +39,12 @@ nix build --override-input foo . || fail "flake should search up directories whe sed "s,$PWD/foo,$PWD/foo/subdir,g" -i flake.nix ! nix build || fail "flake should not search upwards when part of inputs" -pushd subdir -git init -for i in "${success[@]}" "${failure[@]}"; do - ! nix build $i || fail "flake should not search past a git repository" -done -rm -rf .git -popd +if [[ -n $(type -p git) ]]; then + pushd subdir + git init + for i in "${success[@]}" "${failure[@]}"; do + ! nix build $i || fail "flake should not search past a git repository" + done + rm -rf .git + popd +fi diff --git a/tests/fmt.sh b/tests/fmt.sh new file mode 100644 index 000000000..254681ca2 --- /dev/null +++ b/tests/fmt.sh @@ -0,0 +1,35 @@ +source common.sh + +set -o pipefail + +clearStore +rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local + +cp ./simple.nix ./simple.builder.sh ./fmt.simple.sh ./config.nix $TEST_HOME + +cd $TEST_HOME + +nix fmt --help | grep "Format" + +cat << EOF > flake.nix +{ + outputs = _: { + formatter.$system = + with import ./config.nix; + mkDerivation { + name = "formatter"; + buildCommand = '' + mkdir -p \$out/bin + echo "#! 
${shell}" > \$out/bin/formatter + cat \${./fmt.simple.sh} >> \$out/bin/formatter + chmod +x \$out/bin/formatter + ''; + }; + }; +} +EOF +nix fmt ./file ./folder | grep 'Formatting: ./file ./folder' +nix flake check +nix flake show | grep -P "package 'formatter'" + +clearStore diff --git a/tests/fmt.simple.sh b/tests/fmt.simple.sh new file mode 100755 index 000000000..4c8c67ebb --- /dev/null +++ b/tests/fmt.simple.sh @@ -0,0 +1 @@ +echo Formatting: "${@}" diff --git a/tests/github-flakes.nix b/tests/github-flakes.nix index 7ac397d81..43a4f1432 100644 --- a/tests/github-flakes.nix +++ b/tests/github-flakes.nix @@ -7,7 +7,7 @@ with import (nixpkgs + "/nixos/lib/testing-python.nix") { let - # Generate a fake root CA and a fake github.com certificate. + # Generate a fake root CA and a fake api.github.com / github.com / channels.nixos.org certificate. cert = pkgs.runCommand "cert" { buildInputs = [ pkgs.openssl ]; } '' mkdir -p $out @@ -18,7 +18,7 @@ let openssl req -newkey rsa:2048 -nodes -keyout $out/server.key \ -subj "/C=CN/ST=Denial/L=Springfield/O=Dis/CN=github.com" -out server.csr - openssl x509 -req -extfile <(printf "subjectAltName=DNS:api.github.com,DNS:github.com,DNS:raw.githubusercontent.com") \ + openssl x509 -req -extfile <(printf "subjectAltName=DNS:api.github.com,DNS:github.com,DNS:channels.nixos.org") \ -days 36500 -in server.csr -CA $out/ca.crt -CAkey ca.key -CAcreateserial -out $out/server.crt ''; @@ -37,6 +37,17 @@ let "owner": "NixOS", "repo": "nixpkgs" } + }, + { + "from": { + "type": "indirect", + "id": "private-flake" + }, + "to": { + "type": "github", + "owner": "fancy-enterprise", + "repo": "private-flake" + } } ], "version": 2 @@ -45,20 +56,40 @@ let destination = "/flake-registry.json"; }; - api = pkgs.runCommand "nixpkgs-flake" {} + private-flake-rev = "9f1dd0df5b54a7dc75b618034482ed42ce34383d"; + + private-flake-api = pkgs.runCommand "private-flake" {} '' - mkdir -p $out/tarball + mkdir -p $out/{commits,tarball} - dir=NixOS-nixpkgs-${nixpkgs.shortRev} - cp -prd ${nixpkgs} $dir - # Set the correct timestamp in the tarball. - find $dir -print0 | xargs -0 touch -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${builtins.substring 12 2 nixpkgs.lastModifiedDate} -- - tar cfz $out/tarball/${nixpkgs.rev} $dir --hard-dereference + # Setup https://docs.github.com/en/rest/commits/commits#get-a-commit + echo '{"sha": "${private-flake-rev}"}' > $out/commits/HEAD + + # Setup tarball download via API + dir=private-flake + mkdir $dir + echo '{ outputs = {...}: {}; }' > $dir/flake.nix + tar cfz $out/tarball/${private-flake-rev} $dir --hard-dereference + ''; + nixpkgs-api = pkgs.runCommand "nixpkgs-flake" {} + '' mkdir -p $out/commits + + # Setup https://docs.github.com/en/rest/commits/commits#get-a-commit echo '{"sha": "${nixpkgs.rev}"}' > $out/commits/HEAD ''; + archive = pkgs.runCommand "nixpkgs-flake" {} + '' + mkdir -p $out/archive + + dir=NixOS-nixpkgs-${nixpkgs.shortRev} + cp -prd ${nixpkgs} $dir + # Set the correct timestamp in the tarball. + find $dir -print0 | xargs -0 touch -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${builtins.substring 12 2 nixpkgs.lastModifiedDate} -- + tar cfz $out/archive/${nixpkgs.rev}.tar.gz $dir --hard-dereference + ''; in makeTest ( @@ -67,7 +98,7 @@ makeTest ( name = "github-flakes"; nodes = - { # Impersonate github.com and api.github.com. + { github = { config, pkgs, ... 
}: { networking.firewall.allowedTCPPorts = [ 80 443 ]; @@ -77,12 +108,12 @@ makeTest ( services.httpd.extraConfig = '' ErrorLog syslog:local6 ''; - services.httpd.virtualHosts."github.com" = + services.httpd.virtualHosts."channels.nixos.org" = { forceSSL = true; sslServerKey = "${cert}/server.key"; sslServerCert = "${cert}/server.crt"; servedDirs = - [ { urlPath = "/NixOS/flake-registry/raw/master"; + [ { urlPath = "/"; dir = registry; } ]; @@ -93,7 +124,20 @@ makeTest ( sslServerCert = "${cert}/server.crt"; servedDirs = [ { urlPath = "/repos/NixOS/nixpkgs"; - dir = api; + dir = nixpkgs-api; + } + { urlPath = "/repos/fancy-enterprise/private-flake"; + dir = private-flake-api; + } + ]; + }; + services.httpd.virtualHosts."github.com" = + { forceSSL = true; + sslServerKey = "${cert}/server.key"; + sslServerCert = "${cert}/server.crt"; + servedDirs = + [ { urlPath = "/NixOS/nixpkgs"; + dir = archive; } ]; }; @@ -103,13 +147,12 @@ makeTest ( { config, lib, pkgs, nodes, ... }: { virtualisation.writableStore = true; virtualisation.diskSize = 2048; - virtualisation.pathsInNixDB = [ pkgs.hello pkgs.fuse ]; + virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ]; virtualisation.memorySize = 4096; nix.binaryCaches = lib.mkForce [ ]; nix.extraOptions = "experimental-features = nix-command flakes"; - environment.systemPackages = [ pkgs.jq ]; networking.hosts.${(builtins.head nodes.github.config.networking.interfaces.eth1.ipv4.addresses).address} = - [ "github.com" "api.github.com" "raw.githubusercontent.com" ]; + [ "channels.nixos.org" "api.github.com" "github.com" ]; security.pki.certificateFiles = [ "${cert}/ca.crt" ]; }; }; @@ -121,22 +164,39 @@ makeTest ( start_all() + def cat_log(): + github.succeed("cat /var/log/httpd/*.log >&2") + github.wait_for_unit("httpd.service") client.succeed("curl -v https://github.com/ >&2") - client.succeed("nix registry list | grep nixpkgs") - - rev = client.succeed("nix flake info nixpkgs --json | jq -r .revision") - assert rev.strip() == "${nixpkgs.rev}", "revision mismatch" + out = client.succeed("nix registry list") + print(out) + assert "github:NixOS/nixpkgs" in out, "nixpkgs flake not found" + assert "github:fancy-enterprise/private-flake" in out, "private flake not found" + cat_log() + + # If no github access token is provided, nix should use the public archive url... + out = client.succeed("nix flake metadata nixpkgs --json") + print(out) + info = json.loads(out) + assert info["revision"] == "${nixpkgs.rev}", f"revision mismatch: {info['revision']} != ${nixpkgs.rev}" + cat_log() + + # ... otherwise it should use the API + out = client.succeed("nix flake metadata private-flake --json --access-tokens github.com=ghp_000000000000000000000000000000000000 --tarball-ttl 0") + print(out) + info = json.loads(out) + assert info["revision"] == "${private-flake-rev}", f"revision mismatch: {info['revision']} != ${private-flake-rev}" + cat_log() client.succeed("nix registry pin nixpkgs") - - client.succeed("nix flake info nixpkgs --tarball-ttl 0 >&2") + client.succeed("nix flake metadata nixpkgs --tarball-ttl 0 >&2") # Shut down the web server. The flake should be cached on the client. 
github.succeed("systemctl stop httpd.service") - info = json.loads(client.succeed("nix flake info nixpkgs --json")) + info = json.loads(client.succeed("nix flake metadata nixpkgs --json")) date = time.strftime("%Y%m%d%H%M%S", time.gmtime(info['lastModified'])) assert date == "${nixpkgs.lastModifiedDate}", "time mismatch" diff --git a/tests/impure-derivations.nix b/tests/impure-derivations.nix new file mode 100644 index 000000000..98547e6c1 --- /dev/null +++ b/tests/impure-derivations.nix @@ -0,0 +1,63 @@ +with import ./config.nix; + +rec { + + impure = mkDerivation { + name = "impure"; + outputs = [ "out" "stuff" ]; + buildCommand = + '' + echo impure + x=$(< $TEST_ROOT/counter) + mkdir $out $stuff + echo $x > $out/n + ln -s $out/n $stuff/bla + printf $((x + 1)) > $TEST_ROOT/counter + ''; + __impure = true; + impureEnvVars = [ "TEST_ROOT" ]; + }; + + impureOnImpure = mkDerivation { + name = "impure-on-impure"; + buildCommand = + '' + echo impure-on-impure + x=$(< ${impure}/n) + mkdir $out + printf X$x > $out/n + ln -s ${impure.stuff} $out/symlink + ln -s $out $out/self + ''; + __impure = true; + }; + + # This is not allowed. + inputAddressed = mkDerivation { + name = "input-addressed"; + buildCommand = + '' + cat ${impure} > $out + ''; + }; + + contentAddressed = mkDerivation { + name = "content-addressed"; + buildCommand = + '' + echo content-addressed + x=$(< ${impureOnImpure}/n) + printf ''${x:0:1} > $out + ''; + outputHashMode = "recursive"; + outputHash = "sha256-eBYxcgkuWuiqs4cKNgKwkb3vY/HR0vVsJnqe8itJGcQ="; + }; + + inputAddressedAfterCA = mkDerivation { + name = "input-addressed-after-ca"; + buildCommand = + '' + cat ${contentAddressed} > $out + ''; + }; +} diff --git a/tests/impure-derivations.sh b/tests/impure-derivations.sh new file mode 100644 index 000000000..35ae3f5d3 --- /dev/null +++ b/tests/impure-derivations.sh @@ -0,0 +1,57 @@ +source common.sh + +requireDaemonNewerThan "2.8pre20220311" + +enableFeatures "ca-derivations ca-references impure-derivations" +restartDaemon + +set -o pipefail + +clearStore + +# Basic test of impure derivations: building one a second time should not use the previous result. +printf 0 > $TEST_ROOT/counter + +json=$(nix build -L --no-link --json --file ./impure-derivations.nix impure.all) +path1=$(echo $json | jq -r .[].outputs.out) +path1_stuff=$(echo $json | jq -r .[].outputs.stuff) +[[ $(< $path1/n) = 0 ]] +[[ $(< $path1_stuff/bla) = 0 ]] + +[[ $(nix path-info --json $path1 | jq .[].ca) =~ fixed:r:sha256: ]] + +path2=$(nix build -L --no-link --json --file ./impure-derivations.nix impure | jq -r .[].outputs.out) +[[ $(< $path2/n) = 1 ]] + +# Test impure derivations that depend on impure derivations. +path3=$(nix build -L --no-link --json --file ./impure-derivations.nix impureOnImpure | jq -r .[].outputs.out) +[[ $(< $path3/n) = X2 ]] + +path4=$(nix build -L --no-link --json --file ./impure-derivations.nix impureOnImpure | jq -r .[].outputs.out) +[[ $(< $path4/n) = X3 ]] + +# Test that (self-)references work. +[[ $(< $path4/symlink/bla) = 3 ]] +[[ $(< $path4/self/n) = X3 ]] + +# Input-addressed derivations cannot depend on impure derivations directly. +(! nix build -L --no-link --json --file ./impure-derivations.nix inputAddressed 2>&1) | grep 'depends on impure derivation' + +drvPath=$(nix eval --json --file ./impure-derivations.nix impure.drvPath | jq -r .) 
+[[ $(nix show-derivation $drvPath | jq ".[\"$drvPath\"].outputs.out.impure") = true ]] +[[ $(nix show-derivation $drvPath | jq ".[\"$drvPath\"].outputs.stuff.impure") = true ]] + +# Fixed-output derivations *can* depend on impure derivations. +path5=$(nix build -L --no-link --json --file ./impure-derivations.nix contentAddressed | jq -r .[].outputs.out) +[[ $(< $path5) = X ]] +[[ $(< $TEST_ROOT/counter) = 5 ]] + +# And they should not be rebuilt. +path5=$(nix build -L --no-link --json --file ./impure-derivations.nix contentAddressed | jq -r .[].outputs.out) +[[ $(< $path5) = X ]] +[[ $(< $TEST_ROOT/counter) = 5 ]] + +# Input-addressed derivations can depend on fixed-output derivations that depend on impure derivations. +path6=$(nix build -L --no-link --json --file ./impure-derivations.nix inputAddressedAfterCA | jq -r .[].outputs.out) +[[ $(< $path6) = X ]] +[[ $(< $TEST_ROOT/counter) = 5 ]] diff --git a/tests/installer/default.nix b/tests/installer/default.nix new file mode 100644 index 000000000..32aa7889a --- /dev/null +++ b/tests/installer/default.nix @@ -0,0 +1,220 @@ +{ binaryTarballs +, nixpkgsFor +}: + +let + + installScripts = { + install-default = { + script = '' + tar -xf ./nix.tar.xz + mv ./nix-* nix + ./nix/install --no-channel-add + ''; + }; + + install-force-no-daemon = { + script = '' + tar -xf ./nix.tar.xz + mv ./nix-* nix + ./nix/install --no-daemon + ''; + }; + + install-force-daemon = { + script = '' + tar -xf ./nix.tar.xz + mv ./nix-* nix + ./nix/install --daemon --no-channel-add + ''; + }; + }; + + disableSELinux = "sudo setenforce 0"; + + images = { + + /* + "ubuntu-14-04" = { + image = import <nix/fetchurl.nix> { + url = "https://app.vagrantup.com/ubuntu/boxes/trusty64/versions/20190514.0.0/providers/virtualbox.box"; + hash = "sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="; + }; + rootDisk = "box-disk1.vmdk"; + system = "x86_64-linux"; + }; + */ + + "ubuntu-16-04" = { + image = import <nix/fetchurl.nix> { + url = "https://app.vagrantup.com/generic/boxes/ubuntu1604/versions/4.1.12/providers/libvirt.box"; + hash = "sha256-lO4oYQR2tCh5auxAYe6bPOgEqOgv3Y3GC1QM1tEEEU8="; + }; + rootDisk = "box.img"; + system = "x86_64-linux"; + }; + + "ubuntu-22-04" = { + image = import <nix/fetchurl.nix> { + url = "https://app.vagrantup.com/generic/boxes/ubuntu2204/versions/4.1.12/providers/libvirt.box"; + hash = "sha256-HNll0Qikw/xGIcogni5lz01vUv+R3o8xowP2EtqjuUQ="; + }; + rootDisk = "box.img"; + system = "x86_64-linux"; + }; + + "fedora-36" = { + image = import <nix/fetchurl.nix> { + url = "https://app.vagrantup.com/generic/boxes/fedora36/versions/4.1.12/providers/libvirt.box"; + hash = "sha256-rxPgnDnFkTDwvdqn2CV3ZUo3re9AdPtSZ9SvOHNvaks="; + }; + rootDisk = "box.img"; + system = "x86_64-linux"; + postBoot = disableSELinux; + }; + + # Currently fails with 'error while loading shared libraries: + # libsodium.so.23: cannot stat shared object: Invalid argument'. 
+ /* + "rhel-6" = { + image = import <nix/fetchurl.nix> { + url = "https://app.vagrantup.com/generic/boxes/rhel6/versions/4.1.12/providers/libvirt.box"; + hash = "sha256-QwzbvRoRRGqUCQptM7X/InRWFSP2sqwRt2HaaO6zBGM="; + }; + rootDisk = "box.img"; + system = "x86_64-linux"; + }; + */ + + "rhel-7" = { + image = import <nix/fetchurl.nix> { + url = "https://app.vagrantup.com/generic/boxes/rhel7/versions/4.1.12/providers/libvirt.box"; + hash = "sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="; + }; + rootDisk = "box.img"; + system = "x86_64-linux"; + }; + + "rhel-8" = { + image = import <nix/fetchurl.nix> { + url = "https://app.vagrantup.com/generic/boxes/rhel8/versions/4.1.12/providers/libvirt.box"; + hash = "sha256-zFOPjSputy1dPgrQRixBXmlyN88cAKjJ21VvjSWUCUY="; + }; + rootDisk = "box.img"; + system = "x86_64-linux"; + postBoot = disableSELinux; + }; + + "rhel-9" = { + image = import <nix/fetchurl.nix> { + url = "https://app.vagrantup.com/generic/boxes/rhel9/versions/4.1.12/providers/libvirt.box"; + hash = "sha256-vL/FbB3kK1rcSaR627nWmScYGKGk4seSmAdq6N5diMg="; + }; + rootDisk = "box.img"; + system = "x86_64-linux"; + postBoot = disableSELinux; + extraQemuOpts = "-cpu Westmere-v2"; + }; + + }; + + makeTest = imageName: testName: + let image = images.${imageName}; in + with nixpkgsFor.${image.system}; + runCommand + "installer-test-${imageName}-${testName}" + { buildInputs = [ qemu_kvm openssh ]; + image = image.image; + postBoot = image.postBoot or ""; + installScript = installScripts.${testName}.script; + binaryTarball = binaryTarballs.${system}; + } + '' + shopt -s nullglob + + echo "Unpacking Vagrant box $image..." + tar xvf $image + + image_type=$(qemu-img info ${image.rootDisk} | sed 's/file format: \(.*\)/\1/; t; d') + + qemu-img create -b ./${image.rootDisk} -F "$image_type" -f qcow2 ./disk.qcow2 + + extra_qemu_opts="${image.extraQemuOpts or ""}" + + # Add the config disk, required by the Ubuntu images. + config_drive=$(echo *configdrive.vmdk || true) + if [[ -n $config_drive ]]; then + extra_qemu_opts+=" -drive id=disk2,file=$config_drive,if=virtio" + fi + + echo "Starting qemu..." + qemu-kvm -m 4096 -nographic \ + -drive id=disk1,file=./disk.qcow2,if=virtio \ + -netdev user,id=net0,restrict=yes,hostfwd=tcp::20022-:22 -device virtio-net-pci,netdev=net0 \ + $extra_qemu_opts & + qemu_pid=$! + trap "kill $qemu_pid" EXIT + + if ! [ -e ./vagrant_insecure_key ]; then + cp ${./vagrant_insecure_key} vagrant_insecure_key + fi + + chmod 0400 ./vagrant_insecure_key + + ssh_opts="-o StrictHostKeyChecking=no -o HostKeyAlgorithms=+ssh-rsa -o PubkeyAcceptedKeyTypes=+ssh-rsa -i ./vagrant_insecure_key" + ssh="ssh -p 20022 -q $ssh_opts vagrant@localhost" + + echo "Waiting for SSH..." + for ((i = 0; i < 120; i++)); do + echo "[ssh] Trying to connect..." + if $ssh -- true; then + echo "[ssh] Connected!" + break + fi + if ! kill -0 $qemu_pid; then + echo "qemu died unexpectedly" + exit 1 + fi + sleep 1 + done + + if [[ -n $postBoot ]]; then + echo "Running post-boot commands..." + $ssh "set -ex; $postBoot" + fi + + echo "Copying installer..." + scp -P 20022 $ssh_opts $binaryTarball/nix-*.tar.xz vagrant@localhost:nix.tar.xz + + echo "Running installer..." + $ssh "set -eux; $installScript" + + echo "Testing Nix installation..." + $ssh <<EOF + set -ex + + # FIXME: get rid of this; ideally ssh should just work. 
+ source ~/.bash_profile || true + source ~/.bash_login || true + source ~/.profile || true + source /etc/bashrc || true + + nix-env --version + nix --extra-experimental-features nix-command store ping + + out=\$(nix-build --no-substitute -E 'derivation { name = "foo"; system = "x86_64-linux"; builder = "/bin/sh"; args = ["-c" "echo foobar > \$out"]; }') + [[ \$(cat \$out) = foobar ]] + EOF + + echo "Done!" + touch $out + ''; + +in + +builtins.mapAttrs (imageName: image: + { ${image.system} = builtins.mapAttrs (testName: test: + makeTest imageName testName + ) installScripts; + } +) images diff --git a/tests/installer/vagrant_insecure_key b/tests/installer/vagrant_insecure_key new file mode 100644 index 000000000..7d6a08390 --- /dev/null +++ b/tests/installer/vagrant_insecure_key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEogIBAAKCAQEA6NF8iallvQVp22WDkTkyrtvp9eWW6A8YVr+kz4TjGYe7gHzI +w+niNltGEFHzD8+v1I2YJ6oXevct1YeS0o9HZyN1Q9qgCgzUFtdOKLv6IedplqoP +kcmF0aYet2PkEDo3MlTBckFXPITAMzF8dJSIFo9D8HfdOV0IAdx4O7PtixWKn5y2 +hMNG0zQPyUecp4pzC6kivAIhyfHilFR61RGL+GPXQ2MWZWFYbAGjyiYJnAmCP3NO +Td0jMZEnDkbUvxhMmBYSdETk1rRgm+R4LOzFUGaHqHDLKLX+FIPKcF96hrucXzcW +yLbIbEgE98OHlnVYCzRdK8jlqm8tehUc9c9WhQIBIwKCAQEA4iqWPJXtzZA68mKd +ELs4jJsdyky+ewdZeNds5tjcnHU5zUYE25K+ffJED9qUWICcLZDc81TGWjHyAqD1 +Bw7XpgUwFgeUJwUlzQurAv+/ySnxiwuaGJfhFM1CaQHzfXphgVml+fZUvnJUTvzf +TK2Lg6EdbUE9TarUlBf/xPfuEhMSlIE5keb/Zz3/LUlRg8yDqz5w+QWVJ4utnKnK +iqwZN0mwpwU7YSyJhlT4YV1F3n4YjLswM5wJs2oqm0jssQu/BT0tyEXNDYBLEF4A +sClaWuSJ2kjq7KhrrYXzagqhnSei9ODYFShJu8UWVec3Ihb5ZXlzO6vdNQ1J9Xsf +4m+2ywKBgQD6qFxx/Rv9CNN96l/4rb14HKirC2o/orApiHmHDsURs5rUKDx0f9iP +cXN7S1uePXuJRK/5hsubaOCx3Owd2u9gD6Oq0CsMkE4CUSiJcYrMANtx54cGH7Rk +EjFZxK8xAv1ldELEyxrFqkbE4BKd8QOt414qjvTGyAK+OLD3M2QdCQKBgQDtx8pN +CAxR7yhHbIWT1AH66+XWN8bXq7l3RO/ukeaci98JfkbkxURZhtxV/HHuvUhnPLdX +3TwygPBYZFNo4pzVEhzWoTtnEtrFueKxyc3+LjZpuo+mBlQ6ORtfgkr9gBVphXZG +YEzkCD3lVdl8L4cw9BVpKrJCs1c5taGjDgdInQKBgHm/fVvv96bJxc9x1tffXAcj +3OVdUN0UgXNCSaf/3A/phbeBQe9xS+3mpc4r6qvx+iy69mNBeNZ0xOitIjpjBo2+ +dBEjSBwLk5q5tJqHmy/jKMJL4n9ROlx93XS+njxgibTvU6Fp9w+NOFD/HvxB3Tcz +6+jJF85D5BNAG3DBMKBjAoGBAOAxZvgsKN+JuENXsST7F89Tck2iTcQIT8g5rwWC +P9Vt74yboe2kDT531w8+egz7nAmRBKNM751U/95P9t88EDacDI/Z2OwnuFQHCPDF +llYOUI+SpLJ6/vURRbHSnnn8a/XG+nzedGH5JGqEJNQsz+xT2axM0/W/CRknmGaJ +kda/AoGANWrLCz708y7VYgAtW2Uf1DPOIYMdvo6fxIB5i9ZfISgcJ/bbCUkFrhoH ++vq/5CIWxCPp0f85R4qxxQ5ihxJ0YDQT9Jpx4TMss4PSavPaBH3RXow5Ohe+bYoQ +NE5OgEXk2wVfZczCZpigBKbKZHNYcelXtTt/nP3rsCuGcM4h53s= +-----END RSA PRIVATE KEY----- diff --git a/tests/lang.sh b/tests/lang.sh index 61bb444ba..c0b0fc58c 100644 --- a/tests/lang.sh +++ b/tests/lang.sh @@ -4,6 +4,9 @@ export TEST_VAR=foo # for eval-okay-getenv.nix export NIX_REMOTE=dummy:// nix-instantiate --eval -E 'builtins.trace "Hello" 123' 2>&1 | grep -q Hello +nix-instantiate --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1 +nix-instantiate --trace-verbose --eval -E 'builtins.traceVerbose "Hello" 123' 2>&1 | grep -q Hello +(! nix-instantiate --eval -E 'builtins.traceVerbose "Hello" 123' 2>&1 | grep -q Hello) (! nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1 | grep -q Hello) nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" (throw "Foo")' 2>&1 | grep -q Hello @@ -14,7 +17,7 @@ fail=0 for i in lang/parse-fail-*.nix; do echo "parsing $i (should fail)"; i=$(basename $i .nix) - if nix-instantiate --parse - < lang/$i.nix; then + if ! 
expect 1 nix-instantiate --parse - < lang/$i.nix; then echo "FAIL: $i shouldn't parse" fail=1 fi @@ -23,7 +26,7 @@ done for i in lang/parse-okay-*.nix; do echo "parsing $i (should succeed)"; i=$(basename $i .nix) - if ! nix-instantiate --parse - < lang/$i.nix > lang/$i.out; then + if ! expect 0 nix-instantiate --parse - < lang/$i.nix > lang/$i.out; then echo "FAIL: $i should parse" fail=1 fi @@ -32,7 +35,7 @@ done for i in lang/eval-fail-*.nix; do echo "evaluating $i (should fail)"; i=$(basename $i .nix) - if nix-instantiate --eval lang/$i.nix; then + if ! expect 1 nix-instantiate --eval lang/$i.nix; then echo "FAIL: $i shouldn't evaluate" fail=1 fi @@ -47,7 +50,7 @@ for i in lang/eval-okay-*.nix; do if test -e lang/$i.flags; then flags=$(cat lang/$i.flags) fi - if ! NIX_PATH=lang/dir3:lang/dir4 nix-instantiate $flags --eval --strict lang/$i.nix > lang/$i.out; then + if ! expect 0 env NIX_PATH=lang/dir3:lang/dir4 nix-instantiate $flags --eval --strict lang/$i.nix > lang/$i.out; then echo "FAIL: $i should evaluate" fail=1 elif ! diff lang/$i.out lang/$i.exp; then @@ -57,7 +60,7 @@ for i in lang/eval-okay-*.nix; do fi if test -e lang/$i.exp.xml; then - if ! nix-instantiate --eval --xml --no-location --strict \ + if ! expect 0 nix-instantiate --eval --xml --no-location --strict \ lang/$i.nix > lang/$i.out.xml; then echo "FAIL: $i should evaluate" fail=1 diff --git a/tests/lang/eval-okay-eq.exp b/tests/lang/eval-okay-eq.exp new file mode 100644 index 000000000..27ba77dda --- /dev/null +++ b/tests/lang/eval-okay-eq.exp @@ -0,0 +1 @@ +true diff --git a/tests/lang/eval-okay-eq.exp.disabled b/tests/lang/eval-okay-eq.exp.disabled deleted file mode 100644 index 2015847b6..000000000 --- a/tests/lang/eval-okay-eq.exp.disabled +++ /dev/null @@ -1 +0,0 @@ -Bool(True) diff --git a/tests/lang/eval-okay-fromjson.nix b/tests/lang/eval-okay-fromjson.nix index 102ee82b5..e1c0f86cc 100644 --- a/tests/lang/eval-okay-fromjson.nix +++ b/tests/lang/eval-okay-fromjson.nix @@ -1,36 +1,35 @@ -# RFC 7159, section 13. 
builtins.fromJSON '' { - "Image": { - "Width": 800, - "Height": 600, - "Title": "View from 15th Floor", - "Thumbnail": { - "Url": "http://www.example.com/image/481989943", - "Height": 125, - "Width": 100 + "Video": { + "Title": "The Penguin Chronicles", + "Width": 1920, + "Height": 1080, + "EmbeddedData": [3.14159, 23493,null, true ,false, -10], + "Thumb": { + "Url": "http://www.example.com/video/5678931", + "Width": 200, + "Height": 250 }, - "Animated" : false, - "IDs": [116, 943, 234, 38793, true ,false,null, -100], - "Latitude": 37.7668, - "Longitude": -122.3959 + "Subtitle" : false, + "Latitude": 46.2051, + "Longitude": 6.0723 } } '' == - { Image = - { Width = 800; - Height = 600; - Title = "View from 15th Floor"; - Thumbnail = - { Url = http://www.example.com/image/481989943; - Height = 125; - Width = 100; + { Video = + { Title = "The Penguin Chronicles"; + Width = 1920; + Height = 1080; + EmbeddedData = [ 3.14159 23493 null true false (0-10) ]; + Thumb = + { Url = "http://www.example.com/video/5678931"; + Width = 200; + Height = 250; }; - Animated = false; - IDs = [ 116 943 234 38793 true false null (0-100) ]; - Latitude = 37.7668; - Longitude = -122.3959; + Subtitle = false; + Latitude = 46.2051; + Longitude = 6.0723; }; } diff --git a/tests/lang/eval-okay-versions.nix b/tests/lang/eval-okay-versions.nix index e63c36586..e9111f5f4 100644 --- a/tests/lang/eval-okay-versions.nix +++ b/tests/lang/eval-okay-versions.nix @@ -4,6 +4,7 @@ let name2 = "hello"; name3 = "915resolution-0.5.2"; name4 = "xf86-video-i810-1.7.4"; + name5 = "name-that-ends-with-dash--1.0"; eq = 0; lt = builtins.sub 0 1; @@ -23,6 +24,8 @@ let ((builtins.parseDrvName name3).version == "0.5.2") ((builtins.parseDrvName name4).name == "xf86-video-i810") ((builtins.parseDrvName name4).version == "1.7.4") + ((builtins.parseDrvName name5).name == "name-that-ends-with-dash") + ((builtins.parseDrvName name5).version == "-1.0") (versionTest "1.0" "2.3" lt) (versionTest "2.1" "2.3" lt) (versionTest "2.3" "2.3" eq) diff --git a/tests/lang/parse-fail-eof-in-string.nix b/tests/lang/parse-fail-eof-in-string.nix new file mode 100644 index 000000000..19775d2ec --- /dev/null +++ b/tests/lang/parse-fail-eof-in-string.nix @@ -0,0 +1,3 @@ +# https://github.com/NixOS/nix/issues/6562 +# Note that this file must not end with a newline. +a 1"$
\ No newline at end of file diff --git a/tests/local.mk b/tests/local.mk index 53a9179a3..340817ec3 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -1,5 +1,12 @@ nix_tests = \ - flakes.sh \ + flakes/flakes.sh \ + flakes/run.sh \ + flakes/mercurial.sh \ + flakes/circular.sh \ + flakes/init.sh \ + flakes/follow-paths.sh \ + flakes/bundle.sh \ + flakes/check.sh \ ca/gc.sh \ gc.sh \ remote-store.sh \ @@ -21,6 +28,8 @@ nix_tests = \ tarball.sh \ fetchGit.sh \ fetchurl.sh \ + fetchPath.sh \ + fetchTree-file.sh \ simple.sh \ referrers.sh \ optimise-store.sh \ @@ -41,7 +50,7 @@ nix_tests = \ secure-drv-outputs.sh \ restricted.sh \ fetchGitSubmodules.sh \ - flake-searching.sh \ + flakes/search-root.sh \ ca/duplicate-realisation-in-closure.sh \ readfile-context.sh \ nix-channel.sh \ @@ -52,6 +61,7 @@ nix_tests = \ build-remote-content-addressed-floating.sh \ nar-access.sh \ pure-eval.sh \ + eval.sh \ ca/post-hook.sh \ repl.sh \ ca/repl.sh \ @@ -76,7 +86,8 @@ nix_tests = \ nix-copy-ssh.sh \ post-hook.sh \ function-trace.sh \ - flake-local-settings.sh \ + flakes/config.sh \ + fmt.sh \ eval-store.sh \ why-depends.sh \ import-derivation.sh \ @@ -88,11 +99,18 @@ nix_tests = \ plugins.sh \ build.sh \ ca/nix-run.sh \ + selfref-gc.sh ca/selfref-gc.sh \ db-migration.sh \ - nix-profile.sh \ + bash-profile.sh \ pass-as-file.sh \ describe-stores.sh \ - store-ping.sh + nix-profile.sh \ + suggestions.sh \ + store-ping.sh \ + fetchClosure.sh \ + completions.sh \ + impure-derivations.sh \ + path-from-hash-part.sh ifeq ($(HAVE_LIBCPUID), 1) nix_tests += compute-levels.sh @@ -104,4 +122,8 @@ tests-environment = NIX_REMOTE= $(bash) -e clean-files += $(d)/common.sh $(d)/config.nix $(d)/ca/config.nix -test-deps += tests/common.sh tests/config.nix tests/ca/config.nix tests/plugins/libplugintest.$(SO_EXT) +test-deps += tests/common.sh tests/config.nix tests/ca/config.nix + +ifeq ($(BUILD_SHARED_LIBS), 1) + test-deps += tests/plugins/libplugintest.$(SO_EXT) +endif diff --git a/tests/logging.sh b/tests/logging.sh index c894ad3ff..1481b9b36 100644 --- a/tests/logging.sh +++ b/tests/logging.sh @@ -13,3 +13,14 @@ rm -rf $NIX_LOG_DIR (! nix-store -l $path) nix-build dependencies.nix --no-out-link --compress-build-log [ "$(nix-store -l $path)" = FOO ] + +# test whether empty logs work fine with `nix log`. +builder="$(mktemp)" +echo -e "#!/bin/sh\nmkdir \$out" > "$builder" +outp="$(nix-build -E \ + 'with import ./config.nix; mkDerivation { name = "fnord"; builder = '"$builder"'; }' \ + --out-link "$(mktemp -d)/result")" + +test -d "$outp" + +nix log "$outp" diff --git a/tests/multiple-outputs.nix b/tests/multiple-outputs.nix index b915493f7..624a5dade 100644 --- a/tests/multiple-outputs.nix +++ b/tests/multiple-outputs.nix @@ -80,4 +80,11 @@ rec { ''; }).a; + e = mkDerivation { + name = "multiple-outputs-e"; + outputs = [ "a" "b" "c" ]; + meta.outputsToInstall = [ "a" "b" ]; + buildCommand = "mkdir $a $b $c"; + }; + } diff --git a/tests/nix-copy-closure.nix b/tests/nix-copy-closure.nix index 1b63a3fca..ba8b2cfc9 100644 --- a/tests/nix-copy-closure.nix +++ b/tests/nix-copy-closure.nix @@ -14,7 +14,7 @@ makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; pkgD = pk { client = { config, lib, pkgs, ... }: { virtualisation.writableStore = true; - virtualisation.pathsInNixDB = [ pkgA pkgD.drvPath ]; + virtualisation.additionalPaths = [ pkgA pkgD.drvPath ]; nix.binaryCaches = lib.mkForce [ ]; }; @@ -22,7 +22,7 @@ makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; pkgD = pk { config, pkgs, ... 
}: { services.openssh.enable = true; virtualisation.writableStore = true; - virtualisation.pathsInNixDB = [ pkgB pkgC ]; + virtualisation.additionalPaths = [ pkgB pkgC ]; }; }; diff --git a/tests/nix-profile.sh b/tests/nix-profile.sh index e2e0d1090..7ba3235fa 100644 --- a/tests/nix-profile.sh +++ b/tests/nix-profile.sh @@ -1,9 +1,140 @@ source common.sh -sed -e "s|@localstatedir@|$TEST_ROOT/profile-var|g" -e "s|@coreutils@|$coreutils|g" < ../scripts/nix-profile.sh.in > $TEST_ROOT/nix-profile.sh +clearStore +clearProfiles -user=$(whoami) -rm -rf $TEST_HOME $TEST_ROOT/profile-var -mkdir -p $TEST_HOME -USER=$user $SHELL -e -c ". $TEST_ROOT/nix-profile.sh; set" -USER=$user $SHELL -e -c ". $TEST_ROOT/nix-profile.sh" # test idempotency +enableFeatures "ca-derivations" +restartDaemon + +# Make a flake. +flake1Dir=$TEST_ROOT/flake1 +mkdir -p $flake1Dir + +cat > $flake1Dir/flake.nix <<EOF +{ + description = "Bla bla"; + + outputs = { self }: with import ./config.nix; rec { + packages.$system.default = mkDerivation { + name = "profile-test-\${builtins.readFile ./version}"; + outputs = [ "out" "man" "dev" ]; + builder = builtins.toFile "builder.sh" + '' + mkdir -p \$out/bin + cat > \$out/bin/hello <<EOF + #! ${shell} + echo Hello \${builtins.readFile ./who} + EOF + chmod +x \$out/bin/hello + echo DONE + mkdir -p \$man/share/man + mkdir -p \$dev/include + ''; + __contentAddressed = import ./ca.nix; + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + meta.outputsToInstall = [ "out" "man" ]; + }; + }; +} +EOF + +printf World > $flake1Dir/who +printf 1.0 > $flake1Dir/version +printf false > $flake1Dir/ca.nix + +cp ./config.nix $flake1Dir/ + +# Test upgrading from nix-env. +nix-env -f ./user-envs.nix -i foo-1.0 +nix profile list | grep '0 - - .*-foo-1.0' +nix profile install $flake1Dir -L +[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]] +[ -e $TEST_HOME/.nix-profile/share/man ] +(! [ -e $TEST_HOME/.nix-profile/include ]) +nix profile history +nix profile history | grep "packages.$system.default: ∅ -> 1.0" +nix profile diff-closures | grep 'env-manifest.nix: ε → ∅' + +# Test upgrading a package. +printf NixOS > $flake1Dir/who +printf 2.0 > $flake1Dir/version +nix profile upgrade 1 +[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello NixOS" ]] +nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 2.0, 2.0-man" + +# Test 'history', 'diff-closures'. +nix profile diff-closures + +# Test rollback. +nix profile rollback +[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]] + +# Test uninstall. +[ -e $TEST_HOME/.nix-profile/bin/foo ] +nix profile remove 0 +(! [ -e $TEST_HOME/.nix-profile/bin/foo ]) +nix profile history | grep 'foo: 1.0 -> ∅' +nix profile diff-closures | grep 'Version 3 -> 4' + +# Test installing a non-flake package. +nix profile install --file ./simple.nix '' +[[ $(cat $TEST_HOME/.nix-profile/hello) = "Hello World!" ]] +nix profile remove 1 +nix profile install $(nix-build --no-out-link ./simple.nix) +[[ $(cat $TEST_HOME/.nix-profile/hello) = "Hello World!" ]] + +# Test wipe-history. +nix profile wipe-history +[[ $(nix profile history | grep Version | wc -l) -eq 1 ]] + +# Test upgrade to CA package. +printf true > $flake1Dir/ca.nix +printf 3.0 > $flake1Dir/version +nix profile upgrade 0 +nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 3.0, 3.0-man" + +# Test new install of CA package. 
+nix profile remove 0 +printf 4.0 > $flake1Dir/version +printf Utrecht > $flake1Dir/who +nix profile install $flake1Dir +[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Utrecht" ]] +[[ $(nix path-info --json $(realpath $TEST_HOME/.nix-profile/bin/hello) | jq -r .[].ca) =~ fixed:r:sha256: ]] + +# Override the outputs. +nix profile remove 0 1 +nix profile install "$flake1Dir^*" +[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Utrecht" ]] +[ -e $TEST_HOME/.nix-profile/share/man ] +[ -e $TEST_HOME/.nix-profile/include ] + +printf Nix > $flake1Dir/who +nix profile upgrade 0 +[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Nix" ]] +[ -e $TEST_HOME/.nix-profile/share/man ] +[ -e $TEST_HOME/.nix-profile/include ] + +nix profile remove 0 +nix profile install "$flake1Dir^man" +(! [ -e $TEST_HOME/.nix-profile/bin/hello ]) +[ -e $TEST_HOME/.nix-profile/share/man ] +(! [ -e $TEST_HOME/.nix-profile/include ]) + +# Test priority. +nix profile remove 0 + +# Make another flake. +flake2Dir=$TEST_ROOT/flake2 +printf World > $flake1Dir/who +cp -r $flake1Dir $flake2Dir +printf World2 > $flake2Dir/who + +nix profile install $flake1Dir +[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]] +nix profile install $flake2Dir --priority 100 +[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]] +nix profile install $flake2Dir --priority 0 +[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World2" ]] +# nix profile install $flake1Dir --priority 100 +# [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]] diff --git a/tests/nix-shell.sh b/tests/nix-shell.sh index 3241d7a0f..f291c6f79 100644 --- a/tests/nix-shell.sh +++ b/tests/nix-shell.sh @@ -102,3 +102,11 @@ source <(nix print-dev-env -f "$shellDotNix" shellDrv) [[ ${arr2[1]} = $'\n' ]] [[ ${arr2[2]} = $'x\ny' ]] [[ $(fun) = blabla ]] + +# Test nix-shell with ellipsis and no `inNixShell` argument (for backwards compat with old nixpkgs) +cat >$TEST_ROOT/shell-ellipsis.nix <<EOF +{ system ? "x86_64-linux", ... }@args: +assert (!(args ? inNixShell)); +(import $shellDotNix { }).shellDrv +EOF +nix-shell $TEST_ROOT/shell-ellipsis.nix --run "true" diff --git a/tests/nss-preload.nix b/tests/nss-preload.nix index 2610d2b30..64b655ba2 100644 --- a/tests/nss-preload.nix +++ b/tests/nss-preload.nix @@ -5,6 +5,42 @@ with import (nixpkgs + "/nixos/lib/testing-python.nix") { extraConfigurations = [ { nixpkgs.overlays = [ overlay ]; } ]; }; +let + nix-fetch = pkgs.writeText "fetch.nix" '' + derivation { + # This derivation is a copy of what is available over at + # nix.git:corepkgs/fetchurl.nix + builder = "builtin:fetchurl"; + + # We're going to fetch data from the http_dns instance created before; + # we expect the content to be the same as the content available there.
+ # ``` + # $ nix-hash --type sha256 --to-base32 $(echo "hello world" | sha256sum | cut -d " " -f 1) + # 0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59 + # ``` + outputHash = "0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59"; + outputHashAlgo = "sha256"; + outputHashMode = "flat"; + + name = "example.com"; + url = "http://example.com"; + + unpack = false; + executable = false; + + system = "builtin"; + + preferLocalBuild = true; + + impureEnvVars = [ + "http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy" + ]; + + urls = [ "http://example.com" ]; + } + ''; +in + makeTest ( rec { @@ -68,40 +104,6 @@ rec { }; }; - nix-fetch = pkgs.writeText "fetch.nix" '' - derivation { - # This derivation is an copy from what is available over at - # nix.git:corepkgs/fetchurl.nix - builder = "builtin:fetchurl"; - - # We're going to fetch data from the http_dns instance created before - # we expect the content to be the same as the content available there. - # ``` - # $ nix-hash --type sha256 --to-base32 $(echo "hello world" | sha256sum | cut -d " " -f 1) - # 0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59 - # ``` - outputHash = "0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59"; - outputHashAlgo = "sha256"; - outputHashMode = "flat"; - - name = "example.com"; - url = "http://example.com"; - - unpack = false; - executable = false; - - system = "builtin"; - - preferLocalBuild = true; - - impureEnvVars = [ - "http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy" - ]; - - urls = [ "http://example.com" ]; - } - ''; - testScript = { nodes, ... }: '' http_dns.wait_for_unit("nginx") http_dns.wait_for_open_port(80) diff --git a/tests/path-from-hash-part.sh b/tests/path-from-hash-part.sh new file mode 100644 index 000000000..bdd104434 --- /dev/null +++ b/tests/path-from-hash-part.sh @@ -0,0 +1,10 @@ +source common.sh + +path=$(nix build --no-link --print-out-paths -f simple.nix) + +hash_part=$(basename $path) +hash_part=${hash_part:0:32} + +path2=$(nix store path-from-hash-part $hash_part) + +[[ $path = $path2 ]] diff --git a/tests/plugins.sh b/tests/plugins.sh index e22bf4408..6e278ad9d 100644 --- a/tests/plugins.sh +++ b/tests/plugins.sh @@ -2,6 +2,11 @@ source common.sh set -o pipefail +if [[ $BUILD_SHARED_LIBS != 1 ]]; then + echo "plugins are not supported" + exit 99 +fi + res=$(nix --option setting-set true --option plugin-files $PWD/plugins/libplugintest* eval --expr builtins.anotherNull) [ "$res"x = "nullx" ] diff --git a/tests/plugins/plugintest.cc b/tests/plugins/plugintest.cc index cd7c9f8b1..04b791021 100644 --- a/tests/plugins/plugintest.cc +++ b/tests/plugins/plugintest.cc @@ -13,7 +13,7 @@ MySettings mySettings; static GlobalConfig::Register rs(&mySettings); -static void prim_anotherNull (EvalState & state, const Pos & pos, Value ** args, Value & v) +static void prim_anotherNull (EvalState & state, const PosIdx pos, Value ** args, Value & v) { if (mySettings.settingSet) v.mkNull(); diff --git a/tests/post-hook.sh b/tests/post-hook.sh index 049e40749..4eff5f511 100644 --- a/tests/post-hook.sh +++ b/tests/post-hook.sh @@ -9,12 +9,12 @@ echo 'require-sigs = false' >> $NIX_CONF_DIR/nix.conf restartDaemon -# Build the dependencies and push them to the remote store +# Build the dependencies and push them to the remote store. 
nix-build -o $TEST_ROOT/result dependencies.nix --post-build-hook $PWD/push-to-store.sh clearStore -# Ensure that we the remote store contains both the runtime and buildtime -# closure of what we've just built +# Ensure that the remote store contains both the runtime and build-time +# closure of what we've just built. nix copy --from "$REMOTE_STORE" --no-require-sigs -f dependencies.nix nix copy --from "$REMOTE_STORE" --no-require-sigs -f dependencies.nix input1_drv diff --git a/tests/pure-eval.sh b/tests/pure-eval.sh index 1a4568ea6..b83ab8afe 100644 --- a/tests/pure-eval.sh +++ b/tests/pure-eval.sh @@ -30,3 +30,5 @@ nix eval --store dummy:// --write-to $TEST_ROOT/eval-out --expr '{ x = "foo" + " rm -rf $TEST_ROOT/eval-out (! nix eval --store dummy:// --write-to $TEST_ROOT/eval-out --expr '{ "." = "bla"; }') + +(! nix eval --expr '~/foo') diff --git a/tests/push-to-store.sh b/tests/push-to-store.sh index 25352c751..b1495c9e2 100755 --- a/tests/push-to-store.sh +++ b/tests/push-to-store.sh @@ -1,6 +1,10 @@ #!/bin/sh set -x +set -e + +[ -n "$OUT_PATHS" ] +[ -n "$DRV_PATH" ] echo Pushing "$OUT_PATHS" to "$REMOTE_STORE" printf "%s" "$DRV_PATH" | xargs nix copy --to "$REMOTE_STORE" --no-require-sigs diff --git a/tests/remote-builds.nix b/tests/remote-builds.nix index b9e7352c0..7b2e6f708 100644 --- a/tests/remote-builds.nix +++ b/tests/remote-builds.nix @@ -61,7 +61,7 @@ in } ]; virtualisation.writableStore = true; - virtualisation.pathsInNixDB = [ config.system.build.extraUtils ]; + virtualisation.additionalPaths = [ config.system.build.extraUtils ]; nix.binaryCaches = lib.mkForce [ ]; programs.ssh.extraConfig = "ConnectTimeout 30"; }; diff --git a/tests/repl.sh b/tests/repl.sh index 6505f1741..c555560cc 100644 --- a/tests/repl.sh +++ b/tests/repl.sh @@ -1,29 +1,37 @@ source common.sh +testDir="$PWD" +cd "$TEST_ROOT" + replCmds=" simple = 1 -simple = import ./simple.nix -:b simple +simple = import $testDir/simple.nix +:bl simple :log simple " replFailingCmds=" -failing = import ./simple-failing.nix +failing = import $testDir/simple-failing.nix :b failing :log failing " replUndefinedVariable=" -import ./undefined-variable.nix +import $testDir/undefined-variable.nix " testRepl () { local nixArgs=("$@") + rm -rf repl-result-out || true # cleanup from other runs backed by a foreign nix store local replOutput="$(nix repl "${nixArgs[@]}" <<< "$replCmds")" echo "$replOutput" local outPath=$(echo "$replOutput" |& grep -o -E "$NIX_STORE_DIR/\w*-simple") nix path-info "${nixArgs[@]}" "$outPath" + [ "$(realpath ./repl-result-out)" == "$outPath" ] || fail "nix repl :bl doesn't make a symlink" + # run it again without checking the output to ensure the previously created symlink gets overwritten + nix repl "${nixArgs[@]}" <<< "$replCmds" || fail "nix repl does not work twice with the same inputs" + # simple.nix prints a PATH during build echo "$replOutput" | grep -qs 'PATH=' || fail "nix repl :log doesn't output logs" local replOutput="$(nix repl "${nixArgs[@]}" <<< "$replFailingCmds" 2>&1)" @@ -34,6 +42,11 @@ testRepl () { echo "$replOutput" echo "$replOutput" | grep -qs "while evaluating the file" \ || fail "nix repl --show-trace doesn't show the trace" + + nix repl "${nixArgs[@]}" --option pure-eval true 2>&1 <<< "builtins.currentSystem" \ + | grep "attribute 'currentSystem' missing" + nix repl "${nixArgs[@]}" 2>&1 <<< "builtins.currentSystem" \ + | grep "$(nix-instantiate --eval -E 'builtins.currentSystem')" } # Simple test, try building a drv @@ -42,15 +55,17 @@ testRepl testRepl --store 
"$TEST_ROOT/store?real=$NIX_STORE_DIR" testReplResponse () { - local response="$(nix repl <<< "$1")" - echo "$response" | grep -qs "$2" \ + local commands="$1"; shift + local expectedResponse="$1"; shift + local response="$(nix repl "$@" <<< "$commands")" + echo "$response" | grep -qs "$expectedResponse" \ || fail "repl command set: -$1 +$commands does not respond with: -$2 +$expectedResponse but with: @@ -63,3 +78,48 @@ testReplResponse ' :a { a = "2"; } "result: ${a}" ' "result: 2" + +testReplResponse ' +drvPath +' '".*-simple.drv"' \ +$testDir/simple.nix + +testReplResponse ' +drvPath +' '".*-simple.drv"' \ +--file $testDir/simple.nix --experimental-features 'ca-derivations' + +testReplResponse ' +drvPath +' '".*-simple.drv"' \ +--file $testDir/simple.nix --extra-experimental-features 'repl-flake ca-derivations' + +mkdir -p flake && cat <<EOF > flake/flake.nix +{ + outputs = { self }: { + foo = 1; + bar.baz = 2; + + changingThing = "beforeChange"; + }; +} +EOF +testReplResponse ' +foo + baz +' "3" \ + ./flake ./flake\#bar --experimental-features 'flakes repl-flake' + +# Test the `:reload` mechansim with flakes: +# - Eval `./flake#changingThing` +# - Modify the flake +# - Re-eval it +# - Check that the result has changed +replResult=$( ( +echo "changingThing" +sleep 1 # Leave the repl the time to eval 'foo' +sed -i 's/beforeChange/afterChange/' flake/flake.nix +echo ":reload" +echo "changingThing" +) | nix repl ./flake --experimental-features 'flakes repl-flake') +echo "$replResult" | grep -qs beforeChange +echo "$replResult" | grep -qs afterChange diff --git a/tests/search.sh b/tests/search.sh index 52e12f381..1a98f5b49 100644 --- a/tests/search.sh +++ b/tests/search.sh @@ -28,11 +28,19 @@ nix search -f search.nix '' |grep -q hello e=$'\x1b' # grep doesn't support \e, \033 or even \x1b # Multiple overlapping regexes -(( $(nix search -f search.nix '' 'oo' 'foo' 'oo' | grep "$e\[32;1mfoo$e\\[0;1m" | wc -l) == 1 )) -(( $(nix search -f search.nix '' 'broken b' 'en bar' | grep "$e\[32;1mbroken bar$e\\[0m" | wc -l) == 1 )) +(( $(nix search -f search.nix '' 'oo' 'foo' 'oo' | grep -c "$e\[32;1mfoo$e\\[0;1m") == 1 )) +(( $(nix search -f search.nix '' 'broken b' 'en bar' | grep -c "$e\[32;1mbroken bar$e\\[0m") == 1 )) # Multiple matches # Searching for 'o' should yield the 'o' in 'broken bar', the 'oo' in foo and 'o' in hello -(( $(nix search -f search.nix '' 'o' | grep -Eo "$e\[32;1mo{1,2}$e\[(0|0;1)m" | wc -l) == 3 )) +(( $(nix search -f search.nix '' 'o' | grep -Eoc "$e\[32;1mo{1,2}$e\[(0|0;1)m") == 3 )) # Searching for 'b' should yield the 'b' in bar and the two 'b's in 'broken bar' +# NOTE: This does not work with `grep -c` because it counts the two 'b's in 'broken bar' as one matched line (( $(nix search -f search.nix '' 'b' | grep -Eo "$e\[32;1mb$e\[(0|0;1)m" | wc -l) == 3 )) + +## Tests for --exclude +(( $(nix search -f search.nix -e hello | grep -c hello) == 0 )) + +(( $(nix search -f search.nix foo --exclude 'foo|bar' | grep -Ec 'foo|bar') == 0 )) +(( $(nix search -f search.nix foo -e foo --exclude bar | grep -Ec 'foo|bar') == 0 )) +[[ $(nix search -f search.nix -e bar --json | jq -c 'keys') == '["foo","hello"]' ]] diff --git a/tests/selfref-gc.sh b/tests/selfref-gc.sh new file mode 100644 index 000000000..3f1f50eea --- /dev/null +++ b/tests/selfref-gc.sh @@ -0,0 +1,30 @@ +source common.sh + +requireDaemonNewerThan "2.6.0pre20211215" + +clearStore + +nix-build --no-out-link -E ' + with import ./config.nix; + + let d1 = mkDerivation { + name = "selfref-gc"; + outputs = [ "out" ]; + 
buildCommand = " + echo SELF_REF: $out > $out + "; + }; in + + # the only change from d1 is d1 as an (unused) build input + # to get identical store path in CA. + mkDerivation { + name = "selfref-gc"; + outputs = [ "out" ]; + buildCommand = " + echo UNUSED: ${d1} + echo SELF_REF: $out > $out + "; + } +' + +nix-collect-garbage diff --git a/tests/setuid.nix b/tests/setuid.nix index 35eb304ed..a83b1fc3a 100644 --- a/tests/setuid.nix +++ b/tests/setuid.nix @@ -10,12 +10,12 @@ with import (nixpkgs + "/nixos/lib/testing-python.nix") { makeTest { name = "setuid"; - machine = + nodes.machine = { config, lib, pkgs, ... }: { virtualisation.writableStore = true; nix.binaryCaches = lib.mkForce [ ]; nix.nixPath = [ "nixpkgs=${lib.cleanSource pkgs.path}" ]; - virtualisation.pathsInNixDB = [ pkgs.stdenv pkgs.pkgsi686Linux.stdenv ]; + virtualisation.additionalPaths = [ pkgs.stdenv pkgs.pkgsi686Linux.stdenv ]; }; testScript = { nodes }: '' diff --git a/tests/shell-hello.nix b/tests/shell-hello.nix index 77dcbd2a9..3fdd3501d 100644 --- a/tests/shell-hello.nix +++ b/tests/shell-hello.nix @@ -3,15 +3,24 @@ with import ./config.nix; { hello = mkDerivation { name = "hello"; + outputs = [ "out" "dev" ]; + meta.outputsToInstall = [ "out" ]; buildCommand = '' - mkdir -p $out/bin + mkdir -p $out/bin $dev/bin + cat > $out/bin/hello <<EOF #! ${shell} who=\$1 echo "Hello \''${who:-World} from $out/bin/hello" EOF chmod +x $out/bin/hello + + cat > $dev/bin/hello2 <<EOF + #! ${shell} + echo "Hello2" + EOF + chmod +x $dev/bin/hello2 ''; }; } diff --git a/tests/shell.sh b/tests/shell.sh index 2b85bb337..6a80e8385 100644 --- a/tests/shell.sh +++ b/tests/shell.sh @@ -6,6 +6,10 @@ clearCache nix shell -f shell-hello.nix hello -c hello | grep 'Hello World' nix shell -f shell-hello.nix hello -c hello NixOS | grep 'Hello NixOS' +# Test output selection. +nix shell -f shell-hello.nix hello^dev -c hello2 | grep 'Hello2' +nix shell -f shell-hello.nix 'hello^*' -c hello2 | grep 'Hello2' + if ! canUseSandbox; then exit 99; fi chmod -R u+w $TEST_ROOT/store0 || true diff --git a/tests/signing.sh b/tests/signing.sh index 6aafbeb91..9b673c609 100644 --- a/tests/signing.sh +++ b/tests/signing.sh @@ -81,7 +81,7 @@ info=$(nix path-info --store file://$cacheDir --json $outPath2) [[ $info =~ 'cache1.example.org' ]] [[ $info =~ 'cache2.example.org' ]] -# Copying to a diverted store should fail due to a lack of valid signatures. +# Copying to a diverted store should fail due to a lack of signatures by trusted keys. chmod -R u+w $TEST_ROOT/store0 || true rm -rf $TEST_ROOT/store0 (! 
nix copy --to $TEST_ROOT/store0 $outPath) diff --git a/tests/sourcehut-flakes.nix b/tests/sourcehut-flakes.nix index d1d89d149..daa259dd6 100644 --- a/tests/sourcehut-flakes.nix +++ b/tests/sourcehut-flakes.nix @@ -59,7 +59,7 @@ let echo 'ref: refs/heads/master' > $out/HEAD mkdir -p $out/info - echo '${nixpkgs.rev} refs/heads/master' > $out/info/refs + echo -e '${nixpkgs.rev}\trefs/heads/master\n${nixpkgs.rev}\trefs/tags/foo-bar' > $out/info/refs ''; in @@ -106,7 +106,7 @@ makeTest ( { virtualisation.writableStore = true; virtualisation.diskSize = 2048; - virtualisation.pathsInNixDB = [ pkgs.hello pkgs.fuse ]; + virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ]; virtualisation.memorySize = 4096; nix.binaryCaches = lib.mkForce [ ]; nix.extraOptions = '' @@ -132,6 +132,17 @@ makeTest ( client.succeed("curl -v https://git.sr.ht/ >&2") client.succeed("nix registry list | grep nixpkgs") + # Test that it resolves HEAD + rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs --json | jq -r .revision") + assert rev.strip() == "${nixpkgs.rev}", "revision mismatch" + # Test that it resolves branches + rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs/master --json | jq -r .revision") + assert rev.strip() == "${nixpkgs.rev}", "revision mismatch" + # Test that it resolves tags + rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs/foo-bar --json | jq -r .revision") + assert rev.strip() == "${nixpkgs.rev}", "revision mismatch" + + # Registry and pinning test rev = client.succeed("nix flake info nixpkgs --json | jq -r .revision") assert rev.strip() == "${nixpkgs.rev}", "revision mismatch" diff --git a/tests/suggestions.sh b/tests/suggestions.sh new file mode 100644 index 000000000..f18fefef9 --- /dev/null +++ b/tests/suggestions.sh @@ -0,0 +1,44 @@ +source common.sh + +clearStore + +cd "$TEST_HOME" + +cat <<EOF > flake.nix +{ + outputs = a: { + packages.$system = { + foo = 1; + fo1 = 1; + fo2 = 1; + fooo = 1; + foooo = 1; + fooooo = 1; + fooooo1 = 1; + fooooo2 = 1; + fooooo3 = 1; + fooooo4 = 1; + fooooo5 = 1; + fooooo6 = 1; + }; + }; +} +EOF + +# Probable typo in the requested attribute path. Suggest some close possibilities +NIX_BUILD_STDERR_WITH_SUGGESTIONS=$(! nix build .\#fob 2>&1 1>/dev/null) +[[ "$NIX_BUILD_STDERR_WITH_SUGGESTIONS" =~ "Did you mean one of fo1, fo2, foo or fooo?" ]] || \ + fail "The nix build stderr should suggest the closest possibilities" + +# None of the possible attributes is close to `bar`, so shouldn’t suggest anything NIX_BUILD_STDERR_WITH_NO_CLOSE_SUGGESTION=$(! nix build .\#bar 2>&1 1>/dev/null) +[[ ! "$NIX_BUILD_STDERR_WITH_NO_CLOSE_SUGGESTION" =~ "Did you mean" ]] || \ + fail "The nix build stderr shouldn’t suggest anything if there’s nothing relevant to suggest" + +NIX_EVAL_STDERR_WITH_SUGGESTIONS=$(! nix build --impure --expr '(builtins.getFlake (builtins.toPath ./.)).packages.'$system'.fob' 2>&1 1>/dev/null) +[[ "$NIX_EVAL_STDERR_WITH_SUGGESTIONS" =~ "Did you mean one of fo1, fo2, foo or fooo?" ]] || \ + fail "The evaluator should suggest the closest possibilities" + +NIX_EVAL_STDERR_WITH_SUGGESTIONS=$(! nix build --impure --expr '({ foo }: foo) { foo = 1; fob = 2; }' 2>&1 1>/dev/null) +[[ "$NIX_EVAL_STDERR_WITH_SUGGESTIONS" =~ "Did you mean foo?"
]] || \ + fail "The evaluator should suggest the closest possibility" diff --git a/tests/tarball.sh b/tests/tarball.sh index 1301922a5..d5cab879c 100644 --- a/tests/tarball.sh +++ b/tests/tarball.sh @@ -26,10 +26,14 @@ test_tarball() { nix-build -o $TEST_ROOT/result '<foo>' -I foo=file://$tarball nix-build -o $TEST_ROOT/result -E "import (fetchTarball file://$tarball)" + # Do not re-fetch paths already present + nix-build -o $TEST_ROOT/result -E "import (fetchTarball { url = file:///does-not-exist/must-remain-unused/$tarball; sha256 = \"$hash\"; })" nix-build -o $TEST_ROOT/result -E "import (fetchTree file://$tarball)" nix-build -o $TEST_ROOT/result -E "import (fetchTree { type = \"tarball\"; url = file://$tarball; })" nix-build -o $TEST_ROOT/result -E "import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"$hash\"; })" + # Do not re-fetch paths already present + nix-build -o $TEST_ROOT/result -E "import (fetchTree { type = \"tarball\"; url = file:///does-not-exist/must-remain-unused/$tarball; narHash = \"$hash\"; })" nix-build -o $TEST_ROOT/result -E "import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"sha256-xdKv2pq/IiwLSnBBJXW8hNowI4MrdZfW+SYqDQs7Tzc=\"; })" 2>&1 | grep 'NAR hash mismatch in input' nix-instantiate --strict --eval -E "!((import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"$hash\"; })) ? submodules)" >&2 diff --git a/tests/user-envs.nix b/tests/user-envs.nix index 6ac896ed8..46f8b51dd 100644 --- a/tests/user-envs.nix +++ b/tests/user-envs.nix @@ -8,6 +8,8 @@ assert foo == "foo"; let + platforms = let x = "foobar"; in [ x x ]; + makeDrv = name: progName: (mkDerivation { name = assert progName != "fail"; name; inherit progName system; @@ -15,6 +17,7 @@ let } // { meta = { description = "A silly test package with some \${escaped anti-quotation} in it"; + inherit platforms; }; }); diff --git a/tests/user-envs.sh b/tests/user-envs.sh index aebf6a2a2..d63fe780a 100644 --- a/tests/user-envs.sh +++ b/tests/user-envs.sh @@ -9,7 +9,6 @@ clearProfiles # Query installed: should be empty. test "$(nix-env -p $profiles/test -q '*' | wc -l)" -eq 0 -mkdir -p $TEST_HOME nix-env --switch-profile $profiles/test # Query available: should contain several. @@ -18,6 +17,16 @@ outPath10=$(nix-env -f ./user-envs.nix -qa --out-path --no-name '*' | grep foo-1 drvPath10=$(nix-env -f ./user-envs.nix -qa --drv-path --no-name '*' | grep foo-1.0) [ -n "$outPath10" -a -n "$drvPath10" ] +# Query with JSON +nix-env -f ./user-envs.nix -qa --json | jq -e '.[] | select(.name == "bar-0.1") | [ + .outputName == "out", + .outputs.out == null +] | all' +nix-env -f ./user-envs.nix -qa --json --out-path | jq -e '.[] | select(.name == "bar-0.1") | [ + .outputName == "out", + (.outputs.out | test("'$NIX_STORE_DIR'.*-0\\.1")) +] | all' + # Query descriptions. nix-env -f ./user-envs.nix -qa '*' --description | grep -q silly rm -rf $HOME/.nix-defexpr |