52 files changed, 493 insertions, 169 deletions
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index b2b1f07fb..456a84f87 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -4,6 +4,7 @@ on: push: jobs: tests: + needs: [check_cachix] strategy: matrix: os: [ubuntu-latest, macos-latest] @@ -16,6 +17,7 @@ jobs: - uses: cachix/install-nix-action@v13 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - uses: cachix/cachix-action@v10 + if: needs.check_cachix.outputs.secret == 'true' with: name: '${{ env.CACHIX_NAME }}' signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}' diff --git a/doc/manual/local.mk b/doc/manual/local.mk index 93ec3cad0..ca9af9c4a 100644 --- a/doc/manual/local.mk +++ b/doc/manual/local.mk @@ -78,23 +78,23 @@ man: doc/manual/generated/man1/nix3-manpages all: doc/manual/generated/man1/nix3-manpages $(mandir)/man1/nix3-manpages: doc/manual/generated/man1/nix3-manpages - @mkdir -p $$(dirname $@) - $(trace-install) install -m 0644 $$(dirname $<)/* $$(dirname $@) + @mkdir -p $(DESTDIR)$$(dirname $@) + $(trace-install) install -m 0644 $$(dirname $<)/* $(DESTDIR)$$(dirname $@) doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli - @mkdir -p $$(dirname $@) + @mkdir -p $(DESTDIR)$$(dirname $@) $(trace-gen) for i in doc/manual/src/command-ref/new-cli/*.md; do \ name=$$(basename $$i .md); \ tmpFile=$$(mktemp); \ if [[ $$name = SUMMARY ]]; then continue; fi; \ printf "Title: %s\n\n" "$$name" > $$tmpFile; \ cat $$i >> $$tmpFile; \ - lowdown -sT man -M section=1 $$tmpFile -o $$(dirname $@)/$$name.1; \ + lowdown -sT man -M section=1 $$tmpFile -o $(DESTDIR)$$(dirname $@)/$$name.1; \ rm $$tmpFile; \ done touch $@ $(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/expressions/builtins.md - $(trace-gen) RUST_LOG=warn mdbook build doc/manual -d $(docdir)/manual + $(trace-gen) RUST_LOG=warn mdbook build doc/manual -d $(DESTDIR)$(docdir)/manual endif diff --git a/doc/manual/src/expressions/language-values.md b/doc/manual/src/expressions/language-values.md index ce31029cc..28fa23b58 100644 --- a/doc/manual/src/expressions/language-values.md +++ b/doc/manual/src/expressions/language-values.md @@ -139,6 +139,13 @@ Nix has the following basic data types: environment variable `NIX_PATH` will be searched for the given file or directory name. + Antiquotation is supported in any paths except those in angle brackets. + `./${foo}-${bar}.nix` is a more convenient way of writing + `./. + "/" + foo + "-" + bar + ".nix"` or `./. + "/${foo}-${bar}.nix"`. At + least one slash must appear *before* any antiquotations for this to be + recognized as a path. `a.${foo}/b.${bar}` is a syntactically valid division + operation. `./a.${foo}/b.${bar}` is a path. + - *Booleans* with values `true` and `false`. - The null value, denoted as `null`. diff --git a/doc/manual/src/installation/prerequisites-source.md b/doc/manual/src/installation/prerequisites-source.md index 6825af707..40cb79627 100644 --- a/doc/manual/src/installation/prerequisites-source.md +++ b/doc/manual/src/installation/prerequisites-source.md @@ -26,15 +26,6 @@ available for download from the official repository <https://github.com/google/brotli>. - - The bzip2 compressor program and the `libbz2` library. Thus you must - have bzip2 installed, including development headers and libraries. 
- If your distribution does not provide these, you can obtain bzip2 - from - <https://sourceware.org/bzip2/>. - - - `liblzma`, which is provided by XZ Utils. If your distribution does - not provide this, you can get it from <https://tukaani.org/xz/>. - - cURL and its library. If your distribution does not provide it, you can get it from <https://curl.haxx.se/>. diff --git a/mk/libraries.mk b/mk/libraries.mk index 07bd54dab..fd4d4ee72 100644 --- a/mk/libraries.mk +++ b/mk/libraries.mk @@ -91,7 +91,7 @@ define build-library $(1)_PATH := $$(_d)/$$($(1)_NAME).$(SO_EXT) $$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/ - $$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED) + $$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) ifndef HOST_DARWIN $(1)_LDFLAGS_USE += -Wl,-rpath,$$(abspath $$(_d)) @@ -105,7 +105,7 @@ define build-library $$(eval $$(call create-dir, $$($(1)_INSTALL_DIR))) $$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/ - $$(trace-ld) $(CXX) -o $$@ -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) + $$(trace-ld) $(CXX) -o $$@ -shared $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $(1)_LDFLAGS_USE_INSTALLED += -L$$(DESTDIR)$$($(1)_INSTALL_DIR) -l$$(patsubst lib%,%,$$(strip $$($(1)_NAME))) ifndef HOST_DARWIN diff --git a/mk/programs.mk b/mk/programs.mk index d0cf5baf0..70b09f0dd 100644 --- a/mk/programs.mk +++ b/mk/programs.mk @@ -32,7 +32,7 @@ define build-program $$(eval $$(call create-dir, $$(_d))) $$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/ - $$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) + $$(trace-ld) $(CXX) -o $$@ $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $(1)_INSTALL_DIR ?= $$(bindir) @@ -49,7 +49,7 @@ define build-program _libs_final := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_INSTALL_PATH)) $(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/ - $$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) + $$(trace-ld) $(CXX) -o $$@ $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) else diff --git a/scripts/create-darwin-volume.sh b/scripts/create-darwin-volume.sh index 8aff03199..b52232dd3 100755 --- a/scripts/create-darwin-volume.sh +++ b/scripts/create-darwin-volume.sh @@ -715,7 +715,8 @@ create_volume() { # 6) getting special w/ awk may be fragile, but doing it to: # - save time over running slow diskutil commands # - skirt risk we grab wrong volume if multiple match - /usr/sbin/diskutil apfs addVolume "$NIX_VOLUME_USE_DISK" "$NIX_VOLUME_FS" "$NIX_VOLUME_LABEL" -nomount | /usr/bin/awk '/Created new APFS Volume/ {print $5}' + _sudo "to create a new APFS volume '$NIX_VOLUME_LABEL' on 
$NIX_VOLUME_USE_DISK" \ + /usr/sbin/diskutil apfs addVolume "$NIX_VOLUME_USE_DISK" "$NIX_VOLUME_FS" "$NIX_VOLUME_LABEL" -nomount | /usr/bin/awk '/Created new APFS Volume/ {print $5}' } volume_uuid_from_special() { @@ -738,7 +739,6 @@ await_volume() { setup_volume() { local use_special use_uuid profile_packages task "Creating a Nix volume" >&2 - # DOING: I'm tempted to wrap this call in a grep to get the new disk special without doing anything too complex, but this sudo wrapper *is* a little complex, so it'll be a PITA unless maybe we can skip sudo on this. Let's just try it without. use_special="${NIX_VOLUME_USE_SPECIAL:-$(create_volume)}" @@ -759,6 +759,11 @@ setup_volume() { await_volume + if [ "$(/usr/sbin/diskutil info -plist "$NIX_ROOT" | xmllint --xpath "(/plist/dict/key[text()='GlobalPermissionsEnabled'])/following-sibling::*[1]" -)" = "<false/>" ]; then + _sudo "to set enableOwnership (enabling users to own files)" \ + /usr/sbin/diskutil enableOwnership "$NIX_ROOT" + fi + # TODO: below is a vague kludge for now; I just don't know # what if any safe action there is to take here. Also, the # reminder isn't very helpful. diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh index f8d6c5e8f..a5166de64 100644 --- a/scripts/install-darwin-multi-user.sh +++ b/scripts/install-darwin-multi-user.sh @@ -206,4 +206,8 @@ poly_prepare_to_install() { EOF setup_darwin_volume fi + + if [ "$(diskutil info -plist /nix | xmllint --xpath "(/plist/dict/key[text()='GlobalPermissionsEnabled'])/following-sibling::*[1]" -)" = "<false/>" ]; then + failure "This script needs a /nix volume with global permissions! This may require running sudo diskutil enableOwnership /nix." + fi } diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh index d02c5cac0..513127a62 100644 --- a/scripts/install-multi-user.sh +++ b/scripts/install-multi-user.sh @@ -33,7 +33,7 @@ NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d" readonly NIX_ROOT="/nix" readonly NIX_EXTRA_CONF=${NIX_EXTRA_CONF:-} -readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshenv" "/etc/bash.bashrc" "/etc/zsh/zshenv") +readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshrc" "/etc/bash.bashrc" "/etc/zsh/zshrc") readonly PROFILE_BACKUP_SUFFIX=".backup-before-nix" readonly PROFILE_NIX_FILE="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.sh" diff --git a/scripts/install.in b/scripts/install.in index ffc1f2785..5be4f9dda 100755 --- a/scripts/install.in +++ b/scripts/install.in @@ -76,14 +76,21 @@ fi tarball=$tmpDir/nix-@nixVersion@-$system.tar.xz -require_util curl "download the binary tarball" require_util tar "unpack the binary tarball" if [ "$(uname -s)" != "Darwin" ]; then require_util xz "unpack the binary tarball" fi +if command -v wget > /dev/null 2>&1; then + fetch() { wget "$1" -O "$2"; } +elif command -v curl > /dev/null 2>&1; then + fetch() { curl -L "$1" -o "$2"; } +else + oops "you don't have wget or curl installed, which I need to download the binary tarball" +fi + echo "downloading Nix @nixVersion@ binary tarball for $system from '$url' to '$tmpDir'..." 
-curl -L "$url" -o "$tarball" || oops "failed to download '$url'" +fetch "$url" "$tarball" || oops "failed to download '$url'" if command -v sha256sum > /dev/null 2>&1; then hash2="$(sha256sum -b "$tarball" | cut -c1-64)" diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh index f3625ed0d..c7af8bb7c 100644 --- a/src/libcmd/command.hh +++ b/src/libcmd/command.hh @@ -108,6 +108,8 @@ enum class Realise { exists. */ Derivation, /* Evaluate in dry-run mode. Postcondition: nothing. */ + // FIXME: currently unused, but could be revived if we can + // evaluate derivations in-memory. Nothing }; diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 68e0469c3..86080f53a 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -654,6 +654,17 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables( for (auto & s : ss) { std::exception_ptr ex; + if (s.find('/') != std::string::npos) { + try { + result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s))); + continue; + } catch (BadStorePath &) { + } catch (...) { + if (!ex) + ex = std::current_exception(); + } + } + try { auto [flakeRef, fragment] = parseFlakeRefWithFragment(s, absPath(".")); result.push_back(std::make_shared<InstallableFlake>( @@ -668,25 +679,7 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables( ex = std::current_exception(); } - if (s.find('/') != std::string::npos) { - try { - result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s))); - continue; - } catch (BadStorePath &) { - } catch (...) { - if (!ex) - ex = std::current_exception(); - } - } - std::rethrow_exception(ex); - - /* - throw Error( - pathExists(s) - ? "path '%s' is not a flake or a store path" - : "don't know how to handle argument '%s'", s); - */ } } @@ -763,7 +756,7 @@ BuiltPaths build(ref<Store> evalStore, ref<Store> store, Realise mode, pathsToBuild.insert(pathsToBuild.end(), b.begin(), b.end()); } - if (mode == Realise::Nothing) + if (mode == Realise::Nothing || mode == Realise::Derivation) printMissing(store, pathsToBuild, lvlError); else if (mode == Realise::Outputs) store->buildPaths(pathsToBuild, bMode, evalStore); diff --git a/src/libcmd/local.mk b/src/libcmd/local.mk index 0a684468e..8b0662753 100644 --- a/src/libcmd/local.mk +++ b/src/libcmd/local.mk @@ -8,7 +8,7 @@ libcmd_SOURCES := $(wildcard $(d)/*.cc) libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers -libcmd_LDFLAGS = -llowdown -pthread +libcmd_LDFLAGS += -llowdown -pthread libcmd_LIBS = libstore libutil libexpr libmain libfetchers diff --git a/src/libexpr/attr-set.hh b/src/libexpr/attr-set.hh index 1da8d91df..7d6ffc9f3 100644 --- a/src/libexpr/attr-set.hh +++ b/src/libexpr/attr-set.hh @@ -17,8 +17,8 @@ struct Attr { Symbol name; Value * value; - Pos * pos; - Attr(Symbol name, Value * value, Pos * pos = &noPos) + ptr<Pos> pos; + Attr(Symbol name, Value * value, ptr<Pos> pos = ptr(&noPos)) : name(name), value(value), pos(pos) { }; Attr() : pos(&noPos) { }; bool operator < (const Attr & a) const @@ -35,13 +35,13 @@ class Bindings { public: typedef uint32_t size_t; - Pos *pos; + ptr<Pos> pos; private: size_t size_, capacity_; Attr attrs[0]; - Bindings(size_t capacity) : size_(0), capacity_(capacity) { } + Bindings(size_t capacity) : pos(&noPos), size_(0), capacity_(capacity) { } Bindings(const Bindings & bindings) = delete; public: diff --git a/src/libexpr/eval.cc 
b/src/libexpr/eval.cc index 327f7e974..76541da8b 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -770,7 +770,7 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval) } Bindings::iterator j = env->values[0]->attrs->find(var.name); if (j != env->values[0]->attrs->end()) { - if (countCalls && j->pos) attrSelects[*j->pos]++; + if (countCalls) attrSelects[*j->pos]++; return j->value; } if (!env->prevWith) @@ -825,9 +825,9 @@ void EvalState::mkThunk_(Value & v, Expr * expr) } -void EvalState::mkPos(Value & v, Pos * pos) +void EvalState::mkPos(Value & v, ptr<Pos> pos) { - if (pos && pos->file.set()) { + if (pos->file.set()) { mkAttrs(v, 3); mkString(*allocAttr(v, sFile), pos->file); mkInt(*allocAttr(v, sLine), pos->line); @@ -1027,7 +1027,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) } else vAttr = i.second.e->maybeThunk(state, i.second.inherited ? env : env2); env2.values[displ++] = vAttr; - v.attrs->push_back(Attr(i.first, vAttr, &i.second.pos)); + v.attrs->push_back(Attr(i.first, vAttr, ptr(&i.second.pos))); } /* If the rec contains an attribute called `__overrides', then @@ -1059,7 +1059,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) else for (auto & i : attrs) - v.attrs->push_back(Attr(i.first, i.second.e->maybeThunk(state, env), &i.second.pos)); + v.attrs->push_back(Attr(i.first, i.second.e->maybeThunk(state, env), ptr(&i.second.pos))); /* Dynamic attrs apply *after* rec and __overrides. */ for (auto & i : dynamicAttrs) { @@ -1076,11 +1076,11 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) i.valueExpr->setName(nameSym); /* Keep sorted order so find can catch duplicates */ - v.attrs->push_back(Attr(nameSym, i.valueExpr->maybeThunk(state, *dynamicEnv), &i.pos)); + v.attrs->push_back(Attr(nameSym, i.valueExpr->maybeThunk(state, *dynamicEnv), ptr(&i.pos))); v.attrs->sort(); // FIXME: inefficient } - v.attrs->pos = &pos; + v.attrs->pos = ptr(&pos); } @@ -1138,7 +1138,7 @@ static string showAttrPath(EvalState & state, Env & env, const AttrPath & attrPa void ExprSelect::eval(EvalState & state, Env & env, Value & v) { Value vTmp; - Pos * pos2 = 0; + ptr<Pos> pos2(&noPos); Value * vAttrs = &vTmp; e->eval(state, env, vTmp); @@ -1164,13 +1164,13 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v) } vAttrs = j->value; pos2 = j->pos; - if (state.countCalls && pos2) state.attrSelects[*pos2]++; + if (state.countCalls) state.attrSelects[*pos2]++; } - state.forceValue(*vAttrs, ( pos2 != NULL ? *pos2 : this->pos ) ); + state.forceValue(*vAttrs, (*pos2 != noPos ? *pos2 : this->pos ) ); } catch (Error & e) { - if (pos2 && pos2->file != state.sDerivationNix) + if (*pos2 != noPos && pos2->file != state.sDerivationNix) addErrorTrace(e, *pos2, "while evaluating the attribute '%1%'", showAttrPath(state, env, attrPath)); throw; @@ -1576,7 +1576,6 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) and none of the strings are allowed to have contexts. 
*/ if (first) { firstType = vTmp.type(); - first = false; } if (firstType == nInt) { @@ -1597,7 +1596,12 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) } else throwEvalError(pos, "cannot add %1% to a float", showType(vTmp)); } else - s << state.coerceToString(pos, vTmp, context, false, firstType == nString); + /* skip canonization of first path, which would only be not + canonized in the first place if it's coming from a ./${foo} type + path */ + s << state.coerceToString(pos, vTmp, context, false, firstType == nString, !first); + + first = false; } if (firstType == nInt) @@ -1616,7 +1620,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) void ExprPos::eval(EvalState & state, Env & env, Value & v) { - state.mkPos(v, &pos); + state.mkPos(v, ptr(&pos)); } @@ -1786,7 +1790,7 @@ std::optional<string> EvalState::tryAttrsToString(const Pos & pos, Value & v, } string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context, - bool coerceMore, bool copyToStore) + bool coerceMore, bool copyToStore, bool canonicalizePath) { forceValue(v, pos); @@ -1798,7 +1802,7 @@ string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context, } if (v.type() == nPath) { - Path path(canonPath(v.path)); + Path path(canonicalizePath ? canonPath(v.path) : v.path); return copyToStore ? copyPathToStore(context, path) : path; } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 6f3474854..93e1ef05f 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -100,6 +100,8 @@ public: /* Store used to build stuff. */ const ref<Store> buildStore; + RootValue vCallFlake = nullptr; + RootValue vImportedDrvToDerivation = nullptr; private: SrcToStore srcToStore; @@ -224,7 +226,8 @@ public: booleans and lists to a string. If `copyToStore' is set, referenced paths are copied to the Nix store as a side effect. */ string coerceToString(const Pos & pos, Value & v, PathSet & context, - bool coerceMore = false, bool copyToStore = true); + bool coerceMore = false, bool copyToStore = true, + bool canonicalizePath = true); string copyPathToStore(PathSet & context, const Path & path); @@ -308,7 +311,7 @@ public: void mkList(Value & v, size_t length); void mkAttrs(Value & v, size_t capacity); void mkThunk_(Value & v, Expr * expr); - void mkPos(Value & v, Pos * pos); + void mkPos(Value & v, ptr<Pos> pos); void concatLists(Value & v, size_t nrLists, Value * * lists, const Pos & pos); diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index a2f100cbd..010d97285 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -325,25 +325,38 @@ LockedFlake lockFlake( std::vector<FlakeRef> parents; + struct LockParent { + /* The path to this parent. */ + InputPath path; + + /* Whether we are currently inside a top-level lockfile + (inputs absolute) or subordinate lockfile (inputs + relative). */ + bool absolute; + }; + std::function<void( const FlakeInputs & flakeInputs, std::shared_ptr<Node> node, const InputPath & inputPathPrefix, - std::shared_ptr<const Node> oldNode)> + std::shared_ptr<const Node> oldNode, + const LockParent & parent, + const Path & parentPath)> computeLocks; computeLocks = [&]( const FlakeInputs & flakeInputs, std::shared_ptr<Node> node, const InputPath & inputPathPrefix, - std::shared_ptr<const Node> oldNode) + std::shared_ptr<const Node> oldNode, + const LockParent & parent, + const Path & parentPath) { debug("computing lock file node '%s'", printInputPath(inputPathPrefix)); /* Get the overrides (i.e. 
attributes of the form 'inputs.nixops.inputs.nixpkgs.url = ...'). */ - // FIXME: check this - for (auto & [id, input] : flake.inputs) { + for (auto & [id, input] : flakeInputs) { for (auto & [idOverride, inputOverride] : input.overrides) { auto inputPath(inputPathPrefix); inputPath.push_back(id); @@ -379,15 +392,19 @@ LockedFlake lockFlake( path we haven't processed yet. */ if (input.follows) { InputPath target; - if (hasOverride || input.absolute) - /* 'follows' from an override is relative to the - root of the graph. */ + + if (parent.absolute && !hasOverride) { target = *input.follows; - else { - /* Otherwise, it's relative to the current flake. */ - target = inputPathPrefix; + } else { + if (hasOverride) { + target = inputPathPrefix; + target.pop_back(); + } else + target = parent.path; + for (auto & i : *input.follows) target.push_back(i); } + debug("input '%s' follows '%s'", inputPathS, printInputPath(target)); node->inputs.insert_or_assign(id, target); continue; @@ -433,7 +450,7 @@ LockedFlake lockFlake( if (hasChildUpdate) { auto inputFlake = getFlake( state, oldLock->lockedRef, false, flakeCache); - computeLocks(inputFlake.inputs, childNode, inputPath, oldLock); + computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, parent, parentPath); } else { /* No need to fetch this flake, we can be lazy. However there may be new overrides on the @@ -450,12 +467,11 @@ LockedFlake lockFlake( } else if (auto follows = std::get_if<1>(&i.second)) { fakeInputs.emplace(i.first, FlakeInput { .follows = *follows, - .absolute = true }); } } - computeLocks(fakeInputs, childNode, inputPath, oldLock); + computeLocks(fakeInputs, childNode, inputPath, oldLock, parent, parentPath); } } else { @@ -467,7 +483,17 @@ LockedFlake lockFlake( throw Error("cannot update flake input '%s' in pure mode", inputPathS); if (input.isFlake) { - auto inputFlake = getFlake(state, *input.ref, useRegistries, flakeCache); + Path localPath = parentPath; + FlakeRef localRef = *input.ref; + + // If this input is a path, recurse it down. + // This allows us to resolve path inputs relative to the current flake. + if (localRef.input.getType() == "path") { + localRef.input.parent = parentPath; + localPath = canonPath(parentPath + "/" + *input.ref->input.getSourcePath()); + } + + auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache); /* Note: in case of an --override-input, we use the *original* ref (input2.ref) for the @@ -488,6 +514,13 @@ LockedFlake lockFlake( parents.push_back(*input.ref); Finally cleanup([&]() { parents.pop_back(); }); + // Follows paths from existing inputs in the top-level lockfile are absolute, + // whereas paths in subordinate lockfiles are relative to those lockfiles. + LockParent newParent { + .path = inputPath, + .absolute = oldLock ? true : false + }; + /* Recursively process the inputs of this flake. Also, unless we already have this flake in the top-level lock file, use this flake's @@ -497,7 +530,8 @@ LockedFlake lockFlake( oldLock ? 
std::dynamic_pointer_cast<const Node>(oldLock) : LockFile::read( - inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root); + inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root, + newParent, localPath); } else { @@ -515,9 +549,17 @@ LockedFlake lockFlake( } }; + LockParent parent { + .path = {}, + .absolute = true + }; + + // Bring in the current ref for relative path resolution if we have it + auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir); + computeLocks( flake.inputs, newLockFile.root, {}, - lockFlags.recreateLockFile ? nullptr : oldLockFile.root); + lockFlags.recreateLockFile ? nullptr : oldLockFile.root, parent, parentPath); for (auto & i : lockFlags.inputOverrides) if (!overridesUsed.count(i.first)) @@ -621,16 +663,14 @@ void callFlake(EvalState & state, mkString(*vRootSubdir, lockedFlake.flake.lockedRef.subdir); - static RootValue vCallFlake = nullptr; - - if (!vCallFlake) { - vCallFlake = allocRootValue(state.allocValue()); + if (!state.vCallFlake) { + state.vCallFlake = allocRootValue(state.allocValue()); state.eval(state.parseExprFromString( #include "call-flake.nix.gen.hh" - , "/"), **vCallFlake); + , "/"), **state.vCallFlake); } - state.callFunction(**vCallFlake, *vLocks, *vTmp1, noPos); + state.callFunction(**state.vCallFlake, *vLocks, *vTmp1, noPos); state.callFunction(*vTmp1, *vRootSrc, *vTmp2, noPos); state.callFunction(*vTmp2, *vRootSubdir, vRes, noPos); } @@ -646,7 +686,7 @@ static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Va lockFlake(state, flakeRef, LockFlags { .updateLockFile = false, - .useRegistries = !evalSettings.pureEval && !settings.useRegistries, + .useRegistries = !evalSettings.pureEval && settings.useRegistries, .allowMutable = !evalSettings.pureEval, }), v); diff --git a/src/libexpr/flake/flake.hh b/src/libexpr/flake/flake.hh index 15fd394f8..d46da9d68 100644 --- a/src/libexpr/flake/flake.hh +++ b/src/libexpr/flake/flake.hh @@ -43,7 +43,6 @@ struct FlakeInput std::optional<FlakeRef> ref; bool isFlake = true; // true = process flake to get outputs, false = (fetched) static source path std::optional<InputPath> follows; - bool absolute = false; // whether 'follows' is relative to the flake root FlakeInputs overrides; }; diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index 27975dc9e..8ad6a1957 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -9,6 +9,9 @@ %s DEFAULT %x STRING %x IND_STRING +%x INPATH +%x INPATH_SLASH +%x PATH_START %{ @@ -97,9 +100,12 @@ ANY .|\n ID [a-zA-Z\_][a-zA-Z0-9\_\'\-]* INT [0-9]+ FLOAT (([1-9][0-9]*\.[0-9]*)|(0?\.[0-9]+))([Ee][+-]?[0-9]+)? -PATH [a-zA-Z0-9\.\_\-\+]*(\/[a-zA-Z0-9\.\_\-\+]+)+\/? -HPATH \~(\/[a-zA-Z0-9\.\_\-\+]+)+\/? -SPATH \<[a-zA-Z0-9\.\_\-\+]+(\/[a-zA-Z0-9\.\_\-\+]+)*\> +PATH_CHAR [a-zA-Z0-9\.\_\-\+] +PATH {PATH_CHAR}*(\/{PATH_CHAR}+)+\/? +PATH_SEG {PATH_CHAR}*\/ +HPATH \~(\/{PATH_CHAR}+)+\/? 
+HPATH_START \~\/ +SPATH \<{PATH_CHAR}+(\/{PATH_CHAR}+)*\> URI [a-zA-Z][a-zA-Z0-9\+\-\.]*\:[a-zA-Z0-9\%\/\?\:\@\&\=\+\$\,\-\_\.\!\~\*\']+ @@ -200,17 +206,73 @@ or { return OR_KW; } return IND_STR; } +{PATH_SEG}\$\{ | +{HPATH_START}\$\{ { + PUSH_STATE(PATH_START); + yyless(0); +} + +<PATH_START>{PATH_SEG} { + POP_STATE(); + PUSH_STATE(INPATH_SLASH); + yylval->path = strdup(yytext); + return PATH; +} + +<PATH_START>{HPATH_START} { + POP_STATE(); + PUSH_STATE(INPATH_SLASH); + yylval->path = strdup(yytext); + return HPATH; +} + +{PATH} { + if (yytext[yyleng-1] == '/') + PUSH_STATE(INPATH_SLASH); + else + PUSH_STATE(INPATH); + yylval->path = strdup(yytext); + return PATH; +} +{HPATH} { + if (yytext[yyleng-1] == '/') + PUSH_STATE(INPATH_SLASH); + else + PUSH_STATE(INPATH); + yylval->path = strdup(yytext); + return HPATH; +} + +<INPATH,INPATH_SLASH>\$\{ { + POP_STATE(); + PUSH_STATE(INPATH); + PUSH_STATE(DEFAULT); + return DOLLAR_CURLY; +} +<INPATH,INPATH_SLASH>{PATH}|{PATH_SEG}|{PATH_CHAR}+ { + POP_STATE(); + if (yytext[yyleng-1] == '/') + PUSH_STATE(INPATH_SLASH); + else + PUSH_STATE(INPATH); + yylval->e = new ExprString(data->symbols.create(string(yytext))); + return STR; +} +<INPATH>{ANY} | +<INPATH><<EOF>> { + /* if we encounter a non-path character we inform the parser that the path has + ended with a PATH_END token and re-parse this character in the default + context (it may be ')', ';', or something of that sort) */ + POP_STATE(); + yyless(0); + return PATH_END; +} + +<INPATH_SLASH>{ANY} | +<INPATH_SLASH><<EOF>> { + throw ParseError("path has a trailing slash"); +} -{PATH} { if (yytext[yyleng-1] == '/') - throw ParseError("path '%s' has a trailing slash", yytext); - yylval->path = strdup(yytext); - return PATH; - } -{HPATH} { if (yytext[yyleng-1] == '/') - throw ParseError("path '%s' has a trailing slash", yytext); - yylval->path = strdup(yytext); - return HPATH; - } {SPATH} { yylval->path = strdup(yytext); return SPATH; } {URI} { yylval->uri = strdup(yytext); return URI; } diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk index 5ba11c619..016631647 100644 --- a/src/libexpr/local.mk +++ b/src/libexpr/local.mk @@ -15,7 +15,7 @@ libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/lib libexpr_LIBS = libutil libstore libfetchers -libexpr_LDFLAGS = -lboost_context -pthread +libexpr_LDFLAGS += -lboost_context -pthread ifdef HOST_LINUX libexpr_LDFLAGS += -ldl endif diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index f948dde47..e3749783a 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -290,13 +290,13 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err %type <formal> formal %type <attrNames> attrs attrpath %type <string_parts> string_parts_interpolated ind_string_parts -%type <e> string_parts string_attr +%type <e> path_start string_parts string_attr %type <id> attr %token <id> ID ATTRPATH %token <e> STR IND_STR %token <n> INT %token <nf> FLOAT -%token <path> PATH HPATH SPATH +%token <path> PATH HPATH SPATH PATH_END %token <uri> URI %token IF THEN ELSE ASSERT WITH LET IN REC INHERIT EQ NEQ AND OR IMPL OR_KW %token DOLLAR_CURLY /* == ${ */ @@ -405,8 +405,11 @@ expr_simple | IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE { $$ = stripIndentation(CUR_POS, data->symbols, *$2); } - | PATH { $$ = new ExprPath(absPath($1, data->basePath)); } - | HPATH { $$ = new ExprPath(getHome() + string{$1 + 1}); } + | path_start PATH_END { $$ = $1; } + | path_start string_parts_interpolated PATH_END { + 
$2->insert($2->begin(), $1); + $$ = new ExprConcatStrings(CUR_POS, false, $2); + } | SPATH { string path($1 + 1, strlen($1) - 2); $$ = new ExprApp(CUR_POS, @@ -452,6 +455,20 @@ string_parts_interpolated } ; +path_start + : PATH { + Path path(absPath($1, data->basePath)); + /* add back in the trailing '/' to the first segment */ + if ($1[strlen($1)-1] == '/' && strlen($1) > 1) + path += "/"; + $$ = new ExprPath(path); + } + | HPATH { + Path path(getHome() + string($1 + 1)); + $$ = new ExprPath(path); + } + ; + ind_string_parts : ind_string_parts IND_STR { $$ = $1; $1->push_back($2); } | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->push_back($3); } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index bfe41c9fa..25d60e175 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -160,16 +160,15 @@ static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vS } w.attrs->sort(); - static RootValue fun; - if (!fun) { - fun = allocRootValue(state.allocValue()); + if (!state.vImportedDrvToDerivation) { + state.vImportedDrvToDerivation = allocRootValue(state.allocValue()); state.eval(state.parseExprFromString( #include "imported-drv-to-derivation.nix.gen.hh" - , "/"), **fun); + , "/"), **state.vImportedDrvToDerivation); } - state.forceFunction(**fun, pos); - mkApp(v, **fun, w); + state.forceFunction(**state.vImportedDrvToDerivation, pos); + mkApp(v, **state.vImportedDrvToDerivation, w); state.forceAttrs(v, pos); } @@ -2109,7 +2108,7 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v) pos ); // !!! add to stack trace? - if (state.countCalls && i->pos) state.attrSelects[*i->pos]++; + if (state.countCalls && *i->pos != noPos) state.attrSelects[*i->pos]++; state.forceValue(*i->value, pos); v = *i->value; } @@ -2369,7 +2368,7 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args for (auto & i : args[0]->lambda.fun->formals->formals) { // !!! 
should optimise booleans (allocate only once) Value * value = state.allocValue(); - v.attrs->push_back(Attr(i.name, value, &i.pos)); + v.attrs->push_back(Attr(i.name, value, ptr(&i.pos))); mkBool(*value, i.def); } v.attrs->sort(); diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 730db84ed..9990a0207 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -45,7 +45,7 @@ void emitTreeAttrs( if (input.getType() == "git") mkBool(*state.allocAttr(v, state.symbols.create("submodules")), - fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false)); + fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(true)); if (auto revCount = input.getRevCount()) mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount); diff --git a/src/libexpr/value-to-xml.cc b/src/libexpr/value-to-xml.cc index 7464455d8..2ddc5f751 100644 --- a/src/libexpr/value-to-xml.cc +++ b/src/libexpr/value-to-xml.cc @@ -42,7 +42,7 @@ static void showAttrs(EvalState & state, bool strict, bool location, XMLAttrs xmlAttrs; xmlAttrs["name"] = i; - if (location && a.pos != &noPos) posToXML(xmlAttrs, *a.pos); + if (location && a.pos != ptr(&noPos)) posToXML(xmlAttrs, *a.pos); XMLOpenElement _(doc, "attr", xmlAttrs); printValueAsXML(state, strict, location, diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh index c839cf23b..c43b047a7 100644 --- a/src/libfetchers/fetchers.hh +++ b/src/libfetchers/fetchers.hh @@ -38,6 +38,9 @@ struct Input bool immutable = false; bool direct = true; + /* path of the parent of this input, used for relative path resolution */ + std::optional<Path> parent; + public: static Input fromURL(const std::string & url); diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index d89763ccd..6b1057c63 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -173,7 +173,7 @@ struct GitInputScheme : InputScheme std::string name = input.getName(); bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false); - bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false); + bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(true); bool allRefs = maybeGetBoolAttr(input.attrs, "allRefs").value_or(false); std::string cacheType = "git"; diff --git a/src/libfetchers/local.mk b/src/libfetchers/local.mk index bbef03afe..2e8869d83 100644 --- a/src/libfetchers/local.mk +++ b/src/libfetchers/local.mk @@ -8,6 +8,6 @@ libfetchers_SOURCES := $(wildcard $(d)/*.cc) libfetchers_CXXFLAGS += -I src/libutil -I src/libstore -libfetchers_LDFLAGS = -pthread +libfetchers_LDFLAGS += -pthread libfetchers_LIBS = libutil libstore diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index d1003de57..b6fcdac9e 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -82,18 +82,30 @@ struct PathInputScheme : InputScheme std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override { + std::string absPath; auto path = getStrAttr(input.attrs, "path"); - // FIXME: check whether access to 'path' is allowed. 
+ if (path[0] != '/' && input.parent) { + auto parent = canonPath(*input.parent); + + // the path isn't relative, prefix it + absPath = canonPath(parent + "/" + path); - auto storePath = store->maybeParseStorePath(path); + // for security, ensure that if the parent is a store path, it's inside it + if (!parent.rfind(store->storeDir, 0) && absPath.rfind(store->storeDir, 0)) + throw BadStorePath("relative path '%s' points outside of its parent's store path %s, this is a security violation", path, parent); + } else + absPath = path; + + // FIXME: check whether access to 'path' is allowed. + auto storePath = store->maybeParseStorePath(absPath); if (storePath) store->addTempRoot(*storePath); if (!storePath || storePath->name() != "source" || !store->isValidPath(*storePath)) // FIXME: try to substitute storePath. - storePath = store->addToStore("source", path); + storePath = store->addToStore("source", absPath); return { Tree(store->toRealPath(*storePath), std::move(*storePath)), diff --git a/src/libmain/local.mk b/src/libmain/local.mk index f45d6e3ff..99da95e27 100644 --- a/src/libmain/local.mk +++ b/src/libmain/local.mk @@ -8,7 +8,7 @@ libmain_SOURCES := $(wildcard $(d)/*.cc) libmain_CXXFLAGS += -I src/libutil -I src/libstore -libmain_LDFLAGS = $(OPENSSL_LIBS) +libmain_LDFLAGS += $(OPENSSL_LIBS) libmain_LIBS = libstore libutil diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 876b8def0..b94accb73 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -566,7 +566,7 @@ void DerivationGoal::tryToBuild() lockFiles.insert(worker.store.Store::toRealPath(*i.second.second)); else lockFiles.insert( - worker.store.Store::toRealPath(drvPath) + "!" + i.first + worker.store.Store::toRealPath(drvPath) + "." + i.first ); } } diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index d68ff64d7..487416a13 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -227,6 +227,12 @@ struct ClientSettings try { if (name == "ssh-auth-sock") // obsolete ; + else if (name == settings.experimentalFeatures.name) { + // We don’t want to forward the experimental features to + // the daemon, as that could cause some pretty weird stuff + if (tokenizeString<Strings>(value) != settings.experimentalFeatures.get()) + debug("Ignoring the client-specified experimental features"); + } else if (trusted || name == settings.buildTimeout.name || name == "connect-timeout" diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 747eb205e..2890df479 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1071,14 +1071,19 @@ StorePathSet LocalStore::querySubstitutablePaths(const StorePathSet & paths) } +// FIXME: move this, it's not specific to LocalStore. void LocalStore::querySubstitutablePathInfos(const StorePathCAMap & paths, SubstitutablePathInfos & infos) { if (!settings.useSubstitutes) return; for (auto & sub : getDefaultSubstituters()) { for (auto & path : paths) { + if (infos.count(path.first)) + // Choose first succeeding substituter. + continue; + auto subPath(path.first); - // recompute store path so that we can use a different store root + // Recompute store path so that we can use a different store root. 
if (path.second) { subPath = makeFixedOutputPathFromCA(path.first.name(), *path.second); if (sub->storeDir == storeDir) diff --git a/src/libstore/local.mk b/src/libstore/local.mk index b87cee8d5..b992bcbc0 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -8,7 +8,7 @@ libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc) libstore_LIBS = libutil -libstore_LDFLAGS = $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread +libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread ifdef HOST_LINUX libstore_LDFLAGS += -ldl endif diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 140f39120..73f590e7b 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -222,6 +222,7 @@ void RemoteStore::setOptions(Connection & conn) overrides.erase(settings.buildCores.name); overrides.erase(settings.useSubstitutes.name); overrides.erase(loggerSettings.showTrace.name); + overrides.erase(settings.experimentalFeatures.name); conn.to << overrides.size(); for (auto & i : overrides) conn.to << i.first << i.second.value; diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 4fb6c40c7..54471bdf2 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -430,9 +430,10 @@ public: virtual StorePathSet querySubstitutablePaths(const StorePathSet & paths) { return {}; }; /* Query substitute info (i.e. references, derivers and download - sizes) of a map of paths to their optional ca values. If a path - does not have substitute info, it's omitted from the resulting - ‘infos’ map. */ + sizes) of a map of paths to their optional ca values. The info + of the first succeeding substituter for each path will be + returned. If a path does not have substitute info, it's omitted + from the resulting ‘infos’ map. */ virtual void querySubstitutablePathInfos(const StorePathCAMap & paths, SubstitutablePathInfos & infos) { return; }; diff --git a/src/libutil/local.mk b/src/libutil/local.mk index 3a6415ee3..f880c0fc5 100644 --- a/src/libutil/local.mk +++ b/src/libutil/local.mk @@ -6,7 +6,7 @@ libutil_DIR := $(d) libutil_SOURCES := $(wildcard $(d)/*.cc) -libutil_LDFLAGS = -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context +libutil_LDFLAGS += -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context ifeq ($(HAVE_LIBCPUID), 1) libutil_LDFLAGS += -lcpuid diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 6b9b850ca..7d6090e0a 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -163,7 +163,7 @@ struct JSONLogger : Logger { void write(const nlohmann::json & json) { - prevLogger.log(lvlError, "@nix " + json.dump()); + prevLogger.log(lvlError, "@nix " + json.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace)); } void log(Verbosity lvl, const FormatOrString & fs) override diff --git a/src/libutil/ref.hh b/src/libutil/ref.hh index 2549ef496..d6bf53bb8 100644 --- a/src/libutil/ref.hh +++ b/src/libutil/ref.hh @@ -99,4 +99,47 @@ make_ref(Args&&... args) return ref<T>(p); } + +/* A non-nullable pointer. + This is similar to a C++ "& reference", but mutable. + This is similar to ref<T> but backed by a regular pointer instead of a smart pointer. 
+ */ +template<typename T> +class ptr { +private: + T * p; + +public: + ptr<T>(const ptr<T> & r) + : p(r.p) + { } + + explicit ptr<T>(T * p) + : p(p) + { + if (!p) + throw std::invalid_argument("null pointer cast to ptr"); + } + + T* operator ->() const + { + return &*p; + } + + T& operator *() const + { + return *p; + } + + bool operator == (const ptr<T> & other) const + { + return p == other.p; + } + + bool operator != (const ptr<T> & other) const + { + return p != other.p; + } +}; + } diff --git a/src/libutil/tarfile.cc b/src/libutil/tarfile.cc index 24905130d..50e691a3d 100644 --- a/src/libutil/tarfile.cc +++ b/src/libutil/tarfile.cc @@ -39,32 +39,30 @@ void TarArchive::check(int err, const std::string & reason) throw Error(reason, archive_error_string(this->archive)); } -TarArchive::TarArchive(Source & source, bool raw) : buffer(4096) +TarArchive::TarArchive(Source & source, bool raw) + : source(&source), buffer(4096) { - this->archive = archive_read_new(); - this->source = &source; - - if (!raw) { - archive_read_support_filter_all(archive); + init(); + if (!raw) archive_read_support_format_all(archive); - } else { - archive_read_support_filter_all(archive); + else archive_read_support_format_raw(archive); - archive_read_support_format_empty(archive); - } check(archive_read_open(archive, (void *)this, callback_open, callback_read, callback_close), "Failed to open archive (%s)"); } - TarArchive::TarArchive(const Path & path) { - this->archive = archive_read_new(); - - archive_read_support_filter_all(archive); + init(); archive_read_support_format_all(archive); check(archive_read_open_filename(archive, path.c_str(), 16384), "failed to open archive: %s"); } +void TarArchive::init() +{ + archive = archive_read_new(); + archive_read_support_filter_all(archive); +} + void TarArchive::close() { check(archive_read_close(this->archive), "Failed to close archive (%s)"); @@ -87,13 +85,16 @@ static void extract_archive(TarArchive & archive, const Path & destDir) struct archive_entry * entry; int r = archive_read_next_header(archive.archive, &entry); if (r == ARCHIVE_EOF) break; - else if (r == ARCHIVE_WARN) + auto name = archive_entry_pathname(entry); + if (!name) + throw Error("cannot get archive member name: %s", archive_error_string(archive.archive)); + if (r == ARCHIVE_WARN) warn(archive_error_string(archive.archive)); else archive.check(r); archive_entry_set_pathname(entry, - (destDir + "/" + archive_entry_pathname(entry)).c_str()); + (destDir + "/" + name).c_str()); archive.check(archive_read_extract(archive.archive, entry, flags)); } diff --git a/src/libutil/tarfile.hh b/src/libutil/tarfile.hh index 4d9141fd4..f107a7e2e 100644 --- a/src/libutil/tarfile.hh +++ b/src/libutil/tarfile.hh @@ -17,10 +17,13 @@ struct TarArchive { // disable copy constructor TarArchive(const TarArchive &) = delete; + void init(); + void close(); ~TarArchive(); }; + void unpackTarfile(Source & source, const Path & destDir); void unpackTarfile(const Path & tarFile, const Path & destDir); diff --git a/src/libutil/util.cc b/src/libutil/util.cc index d1270cd31..72405ac83 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -4,6 +4,7 @@ #include "finally.hh" #include "serialise.hh" +#include <array> #include <cctype> #include <cerrno> #include <climits> diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 5ceb2ae67..1fd4bcbd3 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -131,9 +131,9 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, state.forceValue(topLevel); 
PathSet context; Attr & aDrvPath(*topLevel.attrs->find(state.sDrvPath)); - auto topLevelDrv = state.store->parseStorePath(state.coerceToPath(aDrvPath.pos ? *(aDrvPath.pos) : noPos, *(aDrvPath.value), context)); + auto topLevelDrv = state.store->parseStorePath(state.coerceToPath(*aDrvPath.pos, *aDrvPath.value, context)); Attr & aOutPath(*topLevel.attrs->find(state.sOutPath)); - Path topLevelOut = state.coerceToPath(aOutPath.pos ? *(aOutPath.pos) : noPos, *(aOutPath.value), context); + Path topLevelOut = state.coerceToPath(*aOutPath.pos, *aOutPath.value, context); /* Realise the resulting store expression. */ debug("building user environment"); diff --git a/src/nix/build.cc b/src/nix/build.cc index 13eb66ac6..ce6df7df8 100644 --- a/src/nix/build.cc +++ b/src/nix/build.cc @@ -54,7 +54,7 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile { auto buildables = build( getEvalStore(), store, - dryRun ? Realise::Nothing : Realise::Outputs, + dryRun ? Realise::Derivation : Realise::Outputs, installables, buildMode); if (json) logger->cout("%s", derivedPathsWithHintsToJSON(buildables, store).dump()); diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc index cedb5704c..aca024bca 100644 --- a/src/nix/bundle.cc +++ b/src/nix/bundle.cc @@ -59,7 +59,7 @@ struct CmdBundle : InstallableCommand Strings getDefaultFlakeAttrPathPrefixes() override { - Strings res{"apps." + settings.thisSystem.get() + ".", "packages"}; + Strings res{"apps." + settings.thisSystem.get() + "."}; for (auto & s : SourceExprCommand::getDefaultFlakeAttrPathPrefixes()) res.push_back(s); return res; diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 40e9d98b0..c20b9f272 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -331,7 +331,7 @@ struct Common : InstallableCommand, MixProfile Strings getDefaultFlakeAttrPathPrefixes() override { auto res = SourceExprCommand::getDefaultFlakeAttrPathPrefixes(); - res.emplace_front("devShells." + settings.thisSystem.get()); + res.emplace_front("devShells." + settings.thisSystem.get() + "."); return res; } diff --git a/src/nix/registry.md b/src/nix/registry.md index 557e5795b..a1674bd2e 100644 --- a/src/nix/registry.md +++ b/src/nix/registry.md @@ -41,7 +41,7 @@ A registry is a JSON file with the following format: ```json { "version": 2, - [ + "flakes": [ { "from": { "type": "indirect", diff --git a/src/nix/run.cc b/src/nix/run.cc index 7597b61f7..b01fdebaa 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -167,7 +167,7 @@ struct CmdRun : InstallableCommand Strings getDefaultFlakeAttrPathPrefixes() override { - Strings res{"apps." + settings.thisSystem.get() + ".", "packages"}; + Strings res{"apps." 
+ settings.thisSystem.get() + "."}; for (auto & s : SourceExprCommand::getDefaultFlakeAttrPathPrefixes()) res.push_back(s); return res; diff --git a/tests/fetchGitSubmodules.sh b/tests/fetchGitSubmodules.sh index 5f104355f..03d46088e 100644 --- a/tests/fetchGitSubmodules.sh +++ b/tests/fetchGitSubmodules.sh @@ -42,8 +42,8 @@ r1=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \ r2=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = false; }).outPath") r3=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = true; }).outPath") -[[ $r1 == $r2 ]] -[[ $r2 != $r3 ]] +[[ $r1 == $r3 ]] +[[ $r2 != $r1 ]] r4=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; }).outPath") r5=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = false; }).outPath") @@ -52,13 +52,13 @@ r7=$(nix eval --raw --expr "(builtins.fetchGit { url = $rootRepo; ref = \"master r8=$(nix eval --raw --expr "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; submodules = true; }).outPath") [[ $r1 == $r4 ]] -[[ $r4 == $r5 ]] +[[ $r4 == $r6 ]] [[ $r3 == $r6 ]] [[ $r6 == $r7 ]] [[ $r7 == $r8 ]] have_submodules=$(nix eval --expr "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; }).submodules") -[[ $have_submodules == false ]] +[[ $have_submodules == true ]] have_submodules=$(nix eval --expr "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; submodules = false; }).submodules") [[ $have_submodules == false ]] @@ -66,8 +66,8 @@ have_submodules=$(nix eval --expr "(builtins.fetchGit { url = $rootRepo; rev = \ have_submodules=$(nix eval --expr "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; submodules = true; }).submodules") [[ $have_submodules == true ]] -pathWithoutSubmodules=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; }).outPath") -pathWithSubmodules=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = true; }).outPath") +pathWithoutSubmodules=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = false; }).outPath") +pathWithSubmodules=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; }).outPath") pathWithSubmodulesAgain=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = true; }).outPath") pathWithSubmodulesAgainWithRef=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = true; }).outPath") diff --git a/tests/fetchurl.sh b/tests/fetchurl.sh index cd84e9a4c..3d1685f43 100644 --- a/tests/fetchurl.sh +++ b/tests/fetchurl.sh @@ -5,7 +5,7 @@ clearStore # Test fetching a flat file. 
hash=$(nix-hash --flat --type sha256 ./fetchurl.sh) -outPath=$(nix-build --expr 'import <nix/fetchurl.nix>' --argstr url file://$(pwd)/fetchurl.sh --argstr sha256 $hash --no-out-link) +outPath=$(nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file://$(pwd)/fetchurl.sh --argstr sha256 $hash --no-out-link) cmp $outPath fetchurl.sh @@ -14,7 +14,7 @@ clearStore hash=$(nix hash file --type sha512 --base64 ./fetchurl.sh) -outPath=$(nix-build --expr 'import <nix/fetchurl.nix>' --argstr url file://$(pwd)/fetchurl.sh --argstr sha512 $hash --no-out-link) +outPath=$(nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file://$(pwd)/fetchurl.sh --argstr sha512 $hash --no-out-link) cmp $outPath fetchurl.sh @@ -25,7 +25,7 @@ hash=$(nix hash file ./fetchurl.sh) [[ $hash =~ ^sha256- ]] -outPath=$(nix-build --expr 'import <nix/fetchurl.nix>' --argstr url file://$(pwd)/fetchurl.sh --argstr hash $hash --no-out-link) +outPath=$(nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file://$(pwd)/fetchurl.sh --argstr hash $hash --no-out-link) cmp $outPath fetchurl.sh @@ -38,10 +38,10 @@ hash=$(nix hash file --type sha256 --base16 ./fetchurl.sh) storePath=$(nix --store $other_store store add-file ./fetchurl.sh) -outPath=$(nix-build --expr 'import <nix/fetchurl.nix>' --argstr url file:///no-such-dir/fetchurl.sh --argstr sha256 $hash --no-out-link --substituters $other_store) +outPath=$(nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file:///no-such-dir/fetchurl.sh --argstr sha256 $hash --no-out-link --substituters $other_store) # Test hashed mirrors with an SRI hash. -nix-build --expr 'import <nix/fetchurl.nix>' --argstr url file:///no-such-dir/fetchurl.sh --argstr hash $(nix hash to-sri --type sha256 $hash) \ +nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file:///no-such-dir/fetchurl.sh --argstr hash $(nix hash to-sri --type sha256 $hash) \ --no-out-link --substituters $other_store # Test unpacking a NAR. 
@@ -55,7 +55,7 @@ nix-store --dump $TEST_ROOT/archive > $nar hash=$(nix-hash --flat --type sha256 $nar) -outPath=$(nix-build --expr 'import <nix/fetchurl.nix>' --argstr url file://$nar --argstr sha256 $hash \ +outPath=$(nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file://$nar --argstr sha256 $hash \ --arg unpack true --argstr name xyzzy --no-out-link) echo $outPath | grep -q 'xyzzy' @@ -69,7 +69,7 @@ nix-store --delete $outPath narxz=$TEST_ROOT/archive.nar.xz rm -f $narxz xz --keep $nar -outPath=$(nix-build --expr 'import <nix/fetchurl.nix>' --argstr url file://$narxz --argstr sha256 $hash \ +outPath=$(nix-build -vvvvv --expr 'import <nix/fetchurl.nix>' --argstr url file://$narxz --argstr sha256 $hash \ --arg unpack true --argstr name xyzzy --no-out-link) test -x $outPath/fetchurl.sh diff --git a/tests/flakes.sh b/tests/flakes.sh index 9e1b5b508..610bab391 100644 --- a/tests/flakes.sh +++ b/tests/flakes.sh @@ -23,13 +23,19 @@ flake6Dir=$TEST_ROOT/flake6 flake7Dir=$TEST_ROOT/flake7 templatesDir=$TEST_ROOT/templates nonFlakeDir=$TEST_ROOT/nonFlake +badFlakeDir=$TEST_ROOT/badFlake flakeA=$TEST_ROOT/flakeA flakeB=$TEST_ROOT/flakeB flakeGitBare=$TEST_ROOT/flakeGitBare +flakeFollowsA=$TEST_ROOT/follows/flakeA +flakeFollowsB=$TEST_ROOT/follows/flakeA/flakeB +flakeFollowsC=$TEST_ROOT/follows/flakeA/flakeB/flakeC +flakeFollowsD=$TEST_ROOT/follows/flakeA/flakeD +flakeFollowsE=$TEST_ROOT/follows/flakeA/flakeE -for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $templatesDir $nonFlakeDir $flakeA $flakeB; do +for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $templatesDir $nonFlakeDir $flakeA $flakeB $flakeFollowsA; do rm -rf $repo $repo.tmp - mkdir $repo + mkdir -p $repo git -C $repo init git -C $repo config user.email "foobar@example.com" git -C $repo config user.name "Foobar" @@ -681,3 +687,98 @@ git -C $flakeB commit -a -m 'Foo' # Test list-inputs with circular dependencies nix flake metadata $flakeA + +# Test flake follow paths +mkdir -p $flakeFollowsB +mkdir -p $flakeFollowsC +mkdir -p $flakeFollowsD +mkdir -p $flakeFollowsE + +cat > $flakeFollowsA/flake.nix <<EOF +{ + description = "Flake A"; + inputs = { + B = { + url = "path:./flakeB"; + inputs.foobar.follows = "D"; + }; + + D.url = "path:./flakeD"; + foobar.url = "path:./flakeE"; + }; + outputs = { ... }: {}; +} +EOF + +cat > $flakeFollowsB/flake.nix <<EOF +{ + description = "Flake B"; + inputs = { + foobar.url = "path:./../flakeE"; + C = { + url = "path:./flakeC"; + inputs.foobar.follows = "foobar"; + }; + }; + outputs = { ... }: {}; +} +EOF + +cat > $flakeFollowsC/flake.nix <<EOF +{ + description = "Flake C"; + inputs = { + foobar.url = "path:./../../flakeE"; + }; + outputs = { ... }: {}; +} +EOF + +cat > $flakeFollowsD/flake.nix <<EOF +{ + description = "Flake D"; + inputs = {}; + outputs = { ... }: {}; +} +EOF + +cat > $flakeFollowsE/flake.nix <<EOF +{ + description = "Flake D"; + inputs = {}; + outputs = { ... }: {}; +} +EOF + +git -C $flakeFollowsA add flake.nix flakeB/flake.nix \ + flakeB/flakeC/flake.nix flakeD/flake.nix flakeE/flake.nix + +nix flake lock $flakeFollowsA + +[[ $(jq -c .nodes.B.inputs.C $flakeFollowsA/flake.lock) = '"C"' ]] +[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '["D"]' ]] +[[ $(jq -c .nodes.C.inputs.foobar $flakeFollowsA/flake.lock) = '["B","foobar"]' ]] + +# Ensure a relative path is not allowed to go outside the store path +cat > $flakeFollowsA/flake.nix <<EOF +{ + description = "Flake A"; + inputs = { + B.url = "path:./../../flakeB"; + }; + outputs = { ... 
}: {}; +} +EOF + +git -C $flakeFollowsA add flake.nix + +nix flake lock $flakeFollowsA 2>&1 | grep 'this is a security violation' + +# Test flake in store does not evaluate +rm -rf $badFlakeDir +mkdir $badFlakeDir +echo INVALID > $badFlakeDir/flake.nix +nix store delete $(nix store add-path $badFlakeDir) + +[[ $(nix path-info $(nix store add-path $flake1Dir)) =~ flake1 ]] +[[ $(nix path-info path:$(nix store add-path $flake1Dir)) =~ simple ]] diff --git a/tests/lang/eval-fail-antiquoted-path.nix b/tests/lang/eval-fail-nonexist-path.nix index f2f08107b..f2f08107b 100644 --- a/tests/lang/eval-fail-antiquoted-path.nix +++ b/tests/lang/eval-fail-nonexist-path.nix diff --git a/tests/lang/eval-okay-path-antiquotation.nix b/tests/lang/eval-okay-path-antiquotation.nix new file mode 100644 index 000000000..497d7c1c7 --- /dev/null +++ b/tests/lang/eval-okay-path-antiquotation.nix @@ -0,0 +1,12 @@ +let + foo = "foo"; +in +{ + simple = ./${foo}; + surrounded = ./a-${foo}-b; + absolute = /${foo}; + expr = ./${foo + "/bar"}; + home = ~/${foo}; + notfirst = ./bar/${foo}; + slashes = /${foo}/${"bar"}; +} |
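
To round off the path antiquotation change documented and tested above, here is a minimal Nix sketch of the new syntax (the names `arch` and `os` and the referenced files are hypothetical, used only for illustration):

    let
      arch = "x86_64";
      os   = "linux";
    in {
      # Antiquotation is recognized once at least one slash appears
      # before it, so this is parsed as a single path value.
      config = ./configs/${arch}-${os}.nix;

      # Equivalent spelling using explicit path/string concatenation.
      configVerbose = ./. + "/configs/${arch}-${os}.nix";

      # Antiquotation is also allowed in later path segments; without
      # the leading `./` this would instead parse as a division of two
      # attribute selections, not as a path.
      nested = ./a.${arch}/b.${os};
    }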