Diffstat (limited to 'src')
-rw-r--r--  src/libcmd/installables.cc                  |  23
-rw-r--r--  src/libexpr/eval-settings.cc                |   8
-rw-r--r--  src/libexpr/eval-settings.hh                |  14
-rw-r--r--  src/libexpr/eval.cc                         | 135
-rw-r--r--  src/libexpr/eval.hh                         |   9
-rw-r--r--  src/libexpr/primops.cc                      |  12
-rw-r--r--  src/libfetchers/git.cc                      |   2
-rw-r--r--  src/libfetchers/tarball.cc                  |   2
-rw-r--r--  src/libmain/common-args.cc                  |   5
-rw-r--r--  src/libstore/binary-cache-store.cc          |  14
-rw-r--r--  src/libstore/binary-cache-store.hh          |   2
-rw-r--r--  src/libstore/build/derivation-goal.cc       |   3
-rw-r--r--  src/libstore/build/local-derivation-goal.cc |   4
-rw-r--r--  src/libstore/builtins/fetchurl.cc           |   2
-rw-r--r--  src/libstore/filetransfer.cc                | 128
-rw-r--r--  src/libstore/filetransfer.hh                |  19
-rw-r--r--  src/libstore/http-binary-cache-store.cc     |   8
-rw-r--r--  src/libstore/local-binary-cache-store.cc    |   2
-rw-r--r--  src/libstore/local-store.cc                 |   4
-rw-r--r--  src/libstore/store-api.cc                   |   2
-rw-r--r--  src/libutil/compression.cc                  | 115
-rw-r--r--  src/libutil/compression.hh                  |   1
-rw-r--r--  src/libutil/error.hh                        |   2
-rw-r--r--  src/libutil/file-system.cc                  |   9
-rw-r--r--  src/libutil/file-system.hh                  |   3
-rw-r--r--  src/libutil/hash.cc                         |   2
-rw-r--r--  src/libutil/logging.cc                      |   7
-rw-r--r--  src/nix/add-to-store.cc                     |   2
-rw-r--r--  src/nix/hash.cc                             |   2
-rw-r--r--  src/nix/prefetch.cc                         |   2
-rw-r--r--  src/nix/upgrade-nix.cc                      |   2
31 files changed, 345 insertions, 200 deletions
diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc
index a10214561..711cf1b07 100644
--- a/src/libcmd/installables.cc
+++ b/src/libcmd/installables.cc
@@ -393,13 +393,10 @@ ref<eval_cache::EvalCache> openEvalCache(
EvalState & state,
std::shared_ptr<flake::LockedFlake> lockedFlake)
{
- auto fingerprint = lockedFlake->getFingerprint();
- return make_ref<nix::eval_cache::EvalCache>(
- evalSettings.useEvalCache && evalSettings.pureEval
- ? std::optional { std::cref(fingerprint) }
- : std::nullopt,
- state,
- [&state, lockedFlake]()
+ auto fingerprint = evalSettings.useEvalCache && evalSettings.pureEval
+ ? std::make_optional(lockedFlake->getFingerprint())
+ : std::nullopt;
+ auto rootLoader = [&state, lockedFlake]()
{
/* For testing whether the evaluation cache is
complete. */
@@ -415,7 +412,17 @@ ref<eval_cache::EvalCache> openEvalCache(
assert(aOutputs);
return aOutputs->value;
- });
+ };
+
+ if (fingerprint) {
+ auto search = state.evalCaches.find(fingerprint.value());
+ if (search == state.evalCaches.end()) {
+ search = state.evalCaches.emplace(fingerprint.value(), make_ref<nix::eval_cache::EvalCache>(fingerprint, state, rootLoader)).first;
+ }
+ return search->second;
+ } else {
+ return make_ref<nix::eval_cache::EvalCache>(std::nullopt, state, rootLoader);
+ }
}
Installables SourceExprCommand::parseInstallables(
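
A minimal sketch of the effect of this change, assuming the same EvalState and lockedFlake are used for both calls and that pure evaluation plus the eval cache are enabled:

// Sketch only: repeated openEvalCache() calls for the same locked flake now
// return the same EvalCache instance, looked up in state.evalCaches by the
// flake fingerprint.
auto cacheA = openEvalCache(state, lockedFlake);
auto cacheB = openEvalCache(state, lockedFlake);
assert(&*cacheA == &*cacheB);
// Without a usable fingerprint (impure evaluation or eval cache disabled),
// each call still constructs a fresh, uncached EvalCache as before.
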
diff --git a/src/libexpr/eval-settings.cc b/src/libexpr/eval-settings.cc
index 105fd3e9d..0bdf1b9a5 100644
--- a/src/libexpr/eval-settings.cc
+++ b/src/libexpr/eval-settings.cc
@@ -63,11 +63,9 @@ Strings EvalSettings::getDefaultNixPath()
}
};
- if (!evalSettings.restrictEval && !evalSettings.pureEval) {
- add(getNixDefExpr() + "/channels");
- add(rootChannelsDir() + "/nixpkgs", "nixpkgs");
- add(rootChannelsDir());
- }
+ add(getNixDefExpr() + "/channels");
+ add(rootChannelsDir() + "/nixpkgs", "nixpkgs");
+ add(rootChannelsDir());
return res;
}
diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh
index cd73d195f..4673c509b 100644
--- a/src/libexpr/eval-settings.hh
+++ b/src/libexpr/eval-settings.hh
@@ -75,8 +75,17 @@ struct EvalSettings : Config
R"(
Pure evaluation mode ensures that the result of Nix expressions is fully determined by explicitly declared inputs, and not influenced by external state:
- - Restrict file system and network access to files specified by cryptographic hash
- - Disable [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) and [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime)
+ - File system and network access is restricted to accesses to immutable data only:
+ - Path literals relative to the home directory like `~/lix` are rejected at parse time.
+        - Access to absolute paths that did not result from Nix language evaluation is rejected when such paths are given as parameters to builtins such as [`builtins.readFile`](@docroot@/language/builtins.md#builtins-readFile).
+
+ Access is nonetheless allowed to (absolute) paths in the Nix store that are returned by builtins like [`builtins.filterSource`](@docroot@/language/builtins.md#builtins-filterSource), [`builtins.fetchTarball`](@docroot@/language/builtins.md#builtins-fetchTarball) and similar.
+ - Impure fetches such as not specifying a commit ID for `builtins.fetchGit` or not specifying a hash for `builtins.fetchTarball` are rejected.
+ - In flakes, access to relative paths outside of the root of the flake's source tree (often, a git repository) is rejected.
+ - The evaluator ignores `NIX_PATH`, `-I` and the `nix-path` setting. Thus, [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath) is an empty list.
+ - The builtins [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) and [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime) are absent from `builtins`.
+      - [`builtins.getEnv`](@docroot@/language/builtins.md#builtins-getEnv) always returns an empty string for any variable.
+      - [`builtins.storePath`](@docroot@/language/builtins.md#builtins-storePath) throws an error (Lix may change this, tracking issue: <https://git.lix.systems/lix-project/lix/issues/402>).
)"
};
@@ -98,6 +107,7 @@ struct EvalSettings : Config
allowed to access `https://github.com/NixOS/patchelf.git`.
)"};
+
Setting<bool> traceFunctionCalls{this, false, "trace-function-calls",
R"(
If set to `true`, the Nix evaluator will trace every function call.
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 25d98b23b..9bd27e22d 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -418,7 +418,7 @@ EvalState::EvalState(
}
if (evalSettings.restrictEval || evalSettings.pureEval) {
- allowedPaths = PathSet();
+ allowedPaths = std::optional(PathSet());
for (auto & i : searchPath.elements) {
auto r = resolveSearchPathPath(i.path);
@@ -1533,6 +1533,66 @@ public:
};
};
+/** Currently these each just take one, but maybe in the future we could have diagnostics
+ * for all unexpected and missing arguments?
+ */
+struct FormalsMatch
+{
+ std::vector<Symbol> missing;
+ std::vector<Symbol> unexpected;
+};
+
+/** Match up an attribute argument set to a lambda's formal arguments,
+ * returning which arguments were required but not given, or given but not allowed
+ * (currently at most one of each).
+ */
+FormalsMatch matchupFormals(EvalState & state, Env & env, Displacement & displ, ExprLambda const & lambda, Bindings & attrs)
+{
+ size_t attrsUsed = 0;
+
+ for (auto const & formal : lambda.formals->formals) {
+
+ // The attribute whose name matches the name of the formal we're matching up, if it exists.
+ Attr const * matchingArg = attrs.get(formal.name);
+ if (matchingArg) {
+ attrsUsed += 1;
+ env.values[displ] = matchingArg->value;
+ displ += 1;
+
+ // We're done here. Move on to the next formal.
+ continue;
+ }
+
+ // The argument for this formal wasn't given.
+ // If the formal has a default, use it.
+ if (formal.def) {
+ env.values[displ] = formal.def->maybeThunk(state, env);
+ displ += 1;
+ } else {
+ // Otherwise, let our caller know what was missing.
+ return FormalsMatch{
+ .missing = {formal.name},
+ };
+ }
+ }
+
+ // Check for unexpected extra arguments.
+ if (!lambda.formals->ellipsis && attrsUsed != attrs.size()) {
+ // Return the first unexpected argument.
+ for (Attr const & attr : attrs) {
+ if (!lambda.formals->has(attr.name)) {
+ return FormalsMatch{
+ .unexpected = {attr.name},
+ };
+ }
+ }
+
+ abort(); // unreachable.
+ }
+
+ return FormalsMatch{};
+}
+
void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos)
{
if (callDepth > evalSettings.maxCallDepth)
@@ -1586,53 +1646,42 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
if (lambda.arg)
env2.values[displ++] = args[0];
- /* For each formal argument, get the actual argument. If
- there is no matching actual argument but the formal
- argument has a default, use the default. */
- size_t attrsUsed = 0;
- for (auto & i : lambda.formals->formals) {
- auto j = args[0]->attrs->get(i.name);
- if (!j) {
- if (!i.def) {
- error<TypeError>("function '%1%' called without required argument '%2%'",
- lambda.getName(symbols),
- symbols[i.name])
- .atPos(lambda.pos)
- .withTrace(pos, "from call site")
- .withFrame(*fun.lambda.env, lambda)
- .debugThrow();
- }
- env2.values[displ++] = i.def->maybeThunk(*this, env2);
- } else {
- attrsUsed++;
- env2.values[displ++] = j->value;
+ ///* For each formal argument, get the actual argument. If
+ // there is no matching actual argument but the formal
+ // argument has a default, use the default. */
+ auto const formalsMatch = matchupFormals(
+ *this,
+ env2,
+ displ,
+ lambda,
+ *args[0]->attrs
+ );
+ for (auto const & missingArg : formalsMatch.missing) {
+ auto const missing = symbols[missingArg];
+ error<TypeError>("function '%s' called without required argument '%s'", lambda.getName(symbols), missing)
+ .atPos(lambda.pos)
+ .withTrace(pos, "from call site")
+ .withFrame(*fun.lambda.env, lambda)
+ .debugThrow();
+ }
+ for (auto const & unexpectedArg : formalsMatch.unexpected) {
+ auto const unex = symbols[unexpectedArg];
+ std::set<std::string> formalNames;
+ for (auto const & formal : lambda.formals->formals) {
+ formalNames.insert(symbols[formal.name]);
}
+ auto sug = Suggestions::bestMatches(formalNames, unex);
+ error<TypeError>("function '%s' called with unexpected argument '%s'", lambda.getName(symbols), unex)
+ .atPos(lambda.pos)
+ .withTrace(pos, "from call site")
+ .withSuggestions(sug)
+ .withFrame(*fun.lambda.env, lambda)
+ .debugThrow();
}
- /* Check that each actual argument is listed as a formal
- argument (unless the attribute match specifies a `...'). */
- if (!lambda.formals->ellipsis && attrsUsed != args[0]->attrs->size()) {
- /* Nope, so show the first unexpected argument to the
- user. */
- for (auto & i : *args[0]->attrs)
- if (!lambda.formals->has(i.name)) {
- std::set<std::string> formalNames;
- for (auto & formal : lambda.formals->formals)
- formalNames.insert(symbols[formal.name]);
- auto suggestions = Suggestions::bestMatches(formalNames, symbols[i.name]);
- error<TypeError>("function '%1%' called with unexpected argument '%2%'",
- lambda.getName(symbols),
- symbols[i.name])
- .atPos(lambda.pos)
- .withTrace(pos, "from call site")
- .withSuggestions(suggestions)
- .withFrame(*fun.lambda.env, lambda)
- .debugThrow();
- }
- abort(); // can't happen
- }
}
+
nrFunctionCalls++;
if (countCalls) incrFunctionCall(&lambda);
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index ec6e2bb5e..8e390e46d 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -33,6 +33,10 @@ class EvalState;
class StorePath;
struct SingleDerivedPath;
enum RepairFlag : bool;
+struct MemoryInputAccessor;
+namespace eval_cache {
+ class EvalCache;
+}
/**
@@ -234,6 +238,11 @@ public:
return *new EvalErrorBuilder<T>(*this, args...);
}
+ /**
+ * A cache for evaluation caches, so as to reuse the same root value if possible
+ */
+ std::map<const Hash, ref<eval_cache::EvalCache>> evalCaches;
+
private:
/* Cache for calls to addToStore(); maps source paths to the store
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 4b7e61dfe..20851da70 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -923,14 +923,15 @@ static RegisterPrimOp primop_getEnv({
.args = {"s"},
.doc = R"(
`getEnv` returns the value of the environment variable *s*, or an
- empty string if the variable doesn’t exist. This function should be
+ empty string if the variable doesn't exist. This function should be
used with care, as it can introduce all sorts of nasty environment
dependencies in your Nix expression.
- `getEnv` is used in Nix Packages to locate the file
- `~/.nixpkgs/config.nix`, which contains user-local settings for Nix
- Packages. (That is, it does a `getEnv "HOME"` to locate the user’s
- home directory.)
+ `getEnv` is used in nixpkgs for evil impurities such as locating the file
+ `~/.config/nixpkgs/config.nix` which contains user-local settings for nixpkgs.
+ (That is, it does a `getEnv "HOME"` to locate the user's home directory.)
+
+ When in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval), this function always returns an empty string.
)",
.fun = prim_getEnv,
});
@@ -1506,6 +1507,7 @@ static RegisterPrimOp primop_storePath({
in a new path (e.g. `/nix/store/ld01dnzc…-source-source`).
Not available in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval).
+ Lix may change this, tracking issue: <https://git.lix.systems/lix-project/lix/issues/402>
See also [`builtins.fetchClosure`](#builtins-fetchClosure).
)",
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 07cbc781c..2817fde23 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -232,7 +232,7 @@ std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, co
if (S_ISDIR(st.st_mode)) {
auto prefix = file + "/";
auto i = files.lower_bound(prefix);
- return i != files.end() && (*i).starts_with(prefix);
+ return (i != files.end() && (*i).starts_with(prefix)) || files.count(file);
}
return files.count(file);
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index cda6b7acb..c903895e2 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -46,7 +46,7 @@ DownloadFileResult downloadFile(
request.expectedETag = getStrAttr(cached->infoAttrs, "etag");
FileTransferResult res;
try {
- res = getFileTransfer()->download(request);
+ res = getFileTransfer()->transfer(request);
} catch (FileTransferError & e) {
if (cached) {
warn("%s; using cached version", e.msg());
diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc
index 20b5befe4..8fcb10325 100644
--- a/src/libmain/common-args.cc
+++ b/src/libmain/common-args.cc
@@ -1,5 +1,6 @@
#include "common-args.hh"
#include "args/root.hh"
+#include "error.hh"
#include "globals.hh"
#include "loggers.hh"
#include "logging.hh"
@@ -14,14 +15,14 @@ MixCommonArgs::MixCommonArgs(const std::string & programName)
.shortName = 'v',
.description = "Increase the logging verbosity level.",
.category = loggingCategory,
- .handler = {[]() { verbosity = (Verbosity) (verbosity + 1); }},
+ .handler = {[]() { verbosity = verbosityFromIntClamped(int(verbosity) + 1); }},
});
addFlag({
.longName = "quiet",
.description = "Decrease the logging verbosity level.",
.category = loggingCategory,
- .handler = {[]() { verbosity = verbosity > lvlError ? (Verbosity) (verbosity - 1) : lvlError; }},
+ .handler = {[]() { verbosity = verbosityFromIntClamped(int(verbosity) - 1); }},
});
addFlag({
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index 7df55a32d..d6ded0a24 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -38,7 +38,7 @@ void BinaryCacheStore::init()
{
std::string cacheInfoFile = "nix-cache-info";
- auto cacheInfo = getFile(cacheInfoFile);
+ auto cacheInfo = getFileContents(cacheInfoFile);
if (!cacheInfo) {
upsertFile(cacheInfoFile, "StoreDir: " + storeDir + "\n", "text/x-nix-cache-info");
} else {
@@ -69,10 +69,10 @@ void BinaryCacheStore::upsertFile(const std::string & path,
void BinaryCacheStore::getFile(const std::string & path, Sink & sink)
{
- sink(*getFile(path));
+ sink(*getFileContents(path));
}
-std::optional<std::string> BinaryCacheStore::getFile(const std::string & path)
+std::optional<std::string> BinaryCacheStore::getFileContents(const std::string & path)
{
StringSink sink;
try {
@@ -359,7 +359,7 @@ std::shared_ptr<const ValidPathInfo> BinaryCacheStore::queryPathInfoUncached(con
auto narInfoFile = narInfoFileFor(storePath);
- auto data = getFile(narInfoFile);
+ auto data = getFileContents(narInfoFile);
if (!data) return nullptr;
@@ -385,7 +385,7 @@ StorePath BinaryCacheStore::addToStore(
if (method == FileIngestionMethod::Recursive) {
dumpPath(srcPath, sink, filter);
} else {
- readFile(srcPath, sink);
+ readFileSource(srcPath)->drainInto(sink);
}
auto h = sink.finish().first;
@@ -446,7 +446,7 @@ std::shared_ptr<const Realisation> BinaryCacheStore::queryRealisationUncached(co
{
auto outputInfoFilePath = realisationsPrefix + "/" + id.to_string() + ".doi";
- auto data = getFile(outputInfoFilePath);
+ auto data = getFileContents(outputInfoFilePath);
if (!data) return {};
auto realisation = Realisation::fromJSON(
@@ -486,7 +486,7 @@ std::optional<std::string> BinaryCacheStore::getBuildLogExact(const StorePath &
debug("fetching build log from binary cache '%s/%s'", getUri(), logPath);
- return getFile(logPath);
+ return getFileContents(logPath);
}
void BinaryCacheStore::addBuildLog(const StorePath & drvPath, std::string_view log)
diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh
index 510965d12..cd963fbf9 100644
--- a/src/libstore/binary-cache-store.hh
+++ b/src/libstore/binary-cache-store.hh
@@ -85,7 +85,7 @@ public:
*/
virtual void getFile(const std::string & path, Sink & sink);
- virtual std::optional<std::string> getFile(const std::string & path);
+ virtual std::optional<std::string> getFileContents(const std::string & path);
public:
diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc
index 97ba994ad..46399b0a8 100644
--- a/src/libstore/build/derivation-goal.cc
+++ b/src/libstore/build/derivation-goal.cc
@@ -1553,11 +1553,12 @@ void DerivationGoal::waiteeDone(GoalPtr waitee, ExitCode result)
Goal::waiteeDone(waitee, result);
if (!useDerivation) return;
- auto & fullDrv = *dynamic_cast<Derivation *>(drv.get());
auto * dg = dynamic_cast<DerivationGoal *>(&*waitee);
if (!dg) return;
+ auto & fullDrv = *dynamic_cast<Derivation *>(drv.get());
+
auto * nodeP = fullDrv.inputDrvs.findSlot(DerivedPath::Opaque { .path = dg->drvPath });
if (!nodeP) return;
auto & outputs = nodeP->value;
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index 7066f5c93..bae821266 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -2486,7 +2486,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
HashModuloSink caSink { outputHash.hashType, oldHashPart };
std::visit(overloaded {
[&](const TextIngestionMethod &) {
- readFile(actualPath, caSink);
+ readFileSource(actualPath)->drainInto(caSink);
},
[&](const FileIngestionMethod & m2) {
switch (m2) {
@@ -2494,7 +2494,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
dumpPath(actualPath, caSink);
break;
case FileIngestionMethod::Flat:
- readFile(actualPath, caSink);
+ readFileSource(actualPath)->drainInto(caSink);
break;
}
},
diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc
index 6bf46dad8..37d640fe4 100644
--- a/src/libstore/builtins/fetchurl.cc
+++ b/src/libstore/builtins/fetchurl.cc
@@ -41,7 +41,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
auto decompressor = makeDecompressionSink(
unpack && mainUrl.ends_with(".xz") ? "xz" : "none", sink);
- fileTransfer->download(std::move(request), *decompressor);
+ fileTransfer->download(std::move(request))->drainInto(*decompressor);
decompressor->finish();
});
diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc
index 156ab6f7a..fcb947f96 100644
--- a/src/libstore/filetransfer.cc
+++ b/src/libstore/filetransfer.cc
@@ -686,16 +686,8 @@ struct curlFileTransfer : public FileTransfer
->callback.get_future();
}
- void download(FileTransferRequest && request, Sink & sink) override
+ box_ptr<Source> download(FileTransferRequest && request) override
{
- /* Note: we can't call 'sink' via request.dataCallback, because
- that would cause the sink to execute on the fileTransfer
- thread. If 'sink' is a coroutine, this will fail. Also, if the
- sink is expensive (e.g. one that does decompression and writing
- to the Nix store), it would stall the download thread too much.
- Therefore we use a buffer to communicate data between the
- download thread and the calling thread. */
-
struct State {
bool done = false, failed = false;
std::exception_ptr exc;
@@ -705,13 +697,6 @@ struct curlFileTransfer : public FileTransfer
auto _state = std::make_shared<Sync<State>>();
- /* In case of an exception, wake up the download thread. */
- Finally finally([&]() {
- auto state(_state->lock());
- state->failed |= std::uncaught_exceptions() != 0;
- state->request.notify_one();
- });
-
enqueueFileTransfer(
request,
[_state](std::exception_ptr ex) {
@@ -750,50 +735,99 @@ struct curlFileTransfer : public FileTransfer
}
);
- std::unique_ptr<FinishSink> decompressor;
-
- while (true) {
- checkInterrupt();
-
+ struct InnerSource : Source
+ {
+ const std::shared_ptr<Sync<State>> _state;
std::string chunk;
+ std::string_view buffered;
- /* Grab data if available, otherwise wait for the download
- thread to wake us up. */
+ explicit InnerSource(const std::shared_ptr<Sync<State>> & state) : _state(state) {}
+
+ ~InnerSource()
{
+ // wake up the download thread if it's still going and have it abort
auto state(_state->lock());
+ state->failed |= !state->done;
+ state->request.notify_one();
+ }
- if (state->data.empty()) {
-
- if (state->done) {
- if (state->exc) std::rethrow_exception(state->exc);
- if (decompressor) {
- decompressor->finish();
+ void awaitData(Sync<State>::Lock & state)
+ {
+ /* Grab data if available, otherwise wait for the download
+ thread to wake us up. */
+ while (buffered.empty()) {
+ if (state->data.empty()) {
+ if (state->done) {
+ if (state->exc) {
+ std::rethrow_exception(state->exc);
+ }
+ return;
}
- return;
+
+ state.wait(state->avail);
}
- state.wait(state->avail);
+ chunk = std::move(state->data);
+ buffered = chunk;
+ state->request.notify_one();
+ }
+ }
- if (state->data.empty()) continue;
+ size_t read(char * data, size_t len) override
+ {
+ auto readPartial = [this](char * data, size_t len) {
+ const auto available = std::min(len, buffered.size());
+ memcpy(data, buffered.data(), available);
+ buffered.remove_prefix(available);
+ return available;
+ };
+ size_t total = readPartial(data, len);
+
+ while (total < len) {
+ {
+ auto state(_state->lock());
+ awaitData(state);
+ }
+ const auto current = readPartial(data + total, len - total);
+ total += current;
+ if (total == 0 || current == 0) {
+ break;
+ }
+ }
+
+ if (total == 0) {
+ throw EndOfFile("download finished");
}
- chunk = std::move(state->data);
- /* Reset state->data after the move, since we check data.empty() */
- state->data = "";
+ return total;
+ }
+ };
+
+ struct DownloadSource : Source
+ {
+ InnerSource inner;
+ std::unique_ptr<Source> decompressor;
+
+ explicit DownloadSource(const std::shared_ptr<Sync<State>> & state) : inner(state) {}
+
+ size_t read(char * data, size_t len) override
+ {
+ checkInterrupt();
if (!decompressor) {
- decompressor = makeDecompressionSink(state->encoding, sink);
+ auto state(inner._state->lock());
+ inner.awaitData(state);
+ decompressor = makeDecompressionSource(state->encoding, inner);
}
- state->request.notify_one();
+ return decompressor->read(data, len);
}
+ };
- /* Flush the data to the sink and wake up the download thread
- if it's blocked on a full buffer. We don't hold the state
- lock while doing this to prevent blocking the download
- thread if sink() takes a long time. */
- (*decompressor)(chunk);
- }
+ auto source = make_box_ptr<DownloadSource>(_state);
+ auto lock(_state->lock());
+ source->inner.awaitData(lock);
+ return source;
}
};
@@ -817,14 +851,8 @@ ref<FileTransfer> makeFileTransfer()
return makeCurlFileTransfer();
}
-FileTransferResult FileTransfer::download(const FileTransferRequest & request)
-{
- return enqueueFileTransfer(request).get();
-}
-
-FileTransferResult FileTransfer::upload(const FileTransferRequest & request)
+FileTransferResult FileTransfer::transfer(const FileTransferRequest & request)
{
- /* Note: this method is the same as download, but helps in readability */
return enqueueFileTransfer(request).get();
}
diff --git a/src/libstore/filetransfer.hh b/src/libstore/filetransfer.hh
index 23105e245..b2ee66312 100644
--- a/src/libstore/filetransfer.hh
+++ b/src/libstore/filetransfer.hh
@@ -1,6 +1,7 @@
#pragma once
///@file
+#include "box_ptr.hh"
#include "logging.hh"
#include "serialise.hh"
#include "types.hh"
@@ -99,20 +100,18 @@ struct FileTransfer
virtual std::future<FileTransferResult> enqueueFileTransfer(const FileTransferRequest & request) = 0;
/**
- * Synchronously download a file.
+ * Synchronously transfer a file.
*/
- FileTransferResult download(const FileTransferRequest & request);
+ FileTransferResult transfer(const FileTransferRequest & request);
/**
- * Synchronously upload a file.
+ * Download a file, returning its contents through a source. Will not return
+ * before the transfer has fully started, ensuring that any errors thrown by
+ * the setup phase (e.g. HTTP 404 or similar errors) are not postponed to be
+ * thrown by the returned source. The source will only throw errors detected
+ * during the transfer itself (decompression errors, connection drops, etc).
*/
- FileTransferResult upload(const FileTransferRequest & request);
-
- /**
- * Download a file, writing its data to a sink. The sink will be
- * invoked on the thread of the caller.
- */
- virtual void download(FileTransferRequest && request, Sink & sink) = 0;
+ virtual box_ptr<Source> download(FileTransferRequest && request) = 0;
enum Error { NotFound, Forbidden, Misc, Transient, Interrupted };
};
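
A caller-side sketch of the reworked interface, based only on the calls visible elsewhere in this diff; the URL is illustrative:

FileTransferRequest request("https://cache.example.org/nix-cache-info");
// download() now blocks until the transfer has started, so setup-phase
// failures (HTTP 404, 403, ...) throw here rather than from the Source.
auto source = getFileTransfer()->download(std::move(request));
StringSink sink;
// Errors detected during the transfer itself (connection drops,
// decompression errors) surface while draining the returned Source.
source->drainInto(sink);
// The old synchronous download(request)/upload(request) helpers are now the
// single transfer(request), used e.g. for HEAD requests and uploads.
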
diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index 9fafabe65..06297e2eb 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -115,7 +115,7 @@ protected:
try {
FileTransferRequest request(makeRequest(path));
request.head = true;
- getFileTransfer()->download(request);
+ getFileTransfer()->transfer(request);
return true;
} catch (FileTransferError & e) {
/* S3 buckets return 403 if a file doesn't exist and the
@@ -135,7 +135,7 @@ protected:
req.data = StreamToSourceAdapter(istream).drain();
req.mimeType = mimeType;
try {
- getFileTransfer()->upload(req);
+ getFileTransfer()->transfer(req);
} catch (FileTransferError & e) {
throw UploadToHTTP("while uploading to HTTP binary cache at '%s': %s", cacheUri, e.msg());
}
@@ -155,7 +155,7 @@ protected:
checkEnabled();
auto request(makeRequest(path));
try {
- getFileTransfer()->download(std::move(request), sink);
+ getFileTransfer()->download(std::move(request))->drainInto(sink);
} catch (FileTransferError & e) {
if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache '%s'", path, getUri());
@@ -164,7 +164,7 @@ protected:
}
}
- std::optional<std::string> getFile(const std::string & path) override
+ std::optional<std::string> getFileContents(const std::string & path) override
{
checkEnabled();
diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc
index 5684dcd80..5f6730476 100644
--- a/src/libstore/local-binary-cache-store.cc
+++ b/src/libstore/local-binary-cache-store.cc
@@ -71,7 +71,7 @@ protected:
void getFile(const std::string & path, Sink & sink) override
{
try {
- readFile(binaryCacheDir + "/" + path, sink);
+ readFileSource(binaryCacheDir + "/" + path)->drainInto(sink);
} catch (SysError & e) {
if (e.errNo == ENOENT)
throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache", path);
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 5e79630c6..6f441a0a1 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -1890,7 +1890,7 @@ ContentAddress LocalStore::hashCAPath(
HashModuloSink caSink ( hashType, std::string(pathHash) );
std::visit(overloaded {
[&](const TextIngestionMethod &) {
- readFile(path, caSink);
+ readFileSource(path)->drainInto(caSink);
},
[&](const FileIngestionMethod & m2) {
switch (m2) {
@@ -1898,7 +1898,7 @@ ContentAddress LocalStore::hashCAPath(
dumpPath(path, caSink);
break;
case FileIngestionMethod::Flat:
- readFile(path, caSink);
+ readFileSource(path)->drainInto(caSink);
break;
}
},
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 4e1fb4bd5..244ecf256 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -279,7 +279,7 @@ StorePath Store::addToStore(
if (method == FileIngestionMethod::Recursive)
dumpPath(srcPath, sink, filter);
else
- readFile(srcPath, sink);
+ readFileSource(srcPath)->drainInto(sink);
});
return addToStoreFromDump(*source, name, method, hashAlgo, repair, references);
}
diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc
index 678557a58..d93a1b1d6 100644
--- a/src/libutil/compression.cc
+++ b/src/libutil/compression.cc
@@ -137,64 +137,64 @@ struct NoneSink : CompressionSink
void writeUnbuffered(std::string_view data) override { nextSink(data); }
};
-struct BrotliDecompressionSink : ChunkedCompressionSink
+struct BrotliDecompressionSource : Source
{
- Sink & nextSink;
- BrotliDecoderState * state;
- bool finished = false;
-
- BrotliDecompressionSink(Sink & nextSink) : nextSink(nextSink)
+ static constexpr size_t BUF_SIZE = 32 * 1024;
+ std::unique_ptr<char[]> buf;
+ size_t avail_in = 0;
+ const uint8_t * next_in;
+
+ Source * inner;
+ std::unique_ptr<BrotliDecoderState, void (*)(BrotliDecoderState *)> state;
+
+ BrotliDecompressionSource(Source & inner)
+ : buf(std::make_unique<char[]>(BUF_SIZE))
+ , inner(&inner)
+ , state{
+ BrotliDecoderCreateInstance(nullptr, nullptr, nullptr), BrotliDecoderDestroyInstance}
{
- state = BrotliDecoderCreateInstance(nullptr, nullptr, nullptr);
- if (!state)
+ if (!state) {
throw CompressionError("unable to initialize brotli decoder");
+ }
}
- ~BrotliDecompressionSink()
- {
- BrotliDecoderDestroyInstance(state);
- }
-
- void finish() override
- {
- flush();
- writeInternal({});
- }
-
- void writeInternal(std::string_view data) override
+ size_t read(char * data, size_t len) override
{
- auto next_in = (const uint8_t *) data.data();
- size_t avail_in = data.size();
- uint8_t * next_out = outbuf;
- size_t avail_out = sizeof(outbuf);
-
- while (!finished && (!data.data() || avail_in)) {
- checkInterrupt();
-
- if (!BrotliDecoderDecompressStream(state,
- &avail_in, &next_in,
- &avail_out, &next_out,
- nullptr))
- throw CompressionError("error while decompressing brotli file");
-
- if (avail_out < sizeof(outbuf) || avail_in == 0) {
- nextSink({(char *) outbuf, sizeof(outbuf) - avail_out});
- next_out = outbuf;
- avail_out = sizeof(outbuf);
+ uint8_t * out = (uint8_t *) data;
+ const auto * begin = out;
+
+ try {
+ while (len && !BrotliDecoderIsFinished(state.get())) {
+ checkInterrupt();
+
+ while (avail_in == 0) {
+ avail_in = inner->read(buf.get(), BUF_SIZE);
+ next_in = (const uint8_t *) buf.get();
+ }
+
+ if (!BrotliDecoderDecompressStream(
+ state.get(), &avail_in, &next_in, &len, &out, nullptr
+ ))
+ {
+ throw CompressionError("error while decompressing brotli file");
+ }
}
+ } catch (EndOfFile &) {
+ }
- finished = BrotliDecoderIsFinished(state);
+ if (begin != out) {
+ return out - begin;
+ } else {
+ throw EndOfFile("brotli stream exhausted");
}
}
};
std::string decompress(const std::string & method, std::string_view in)
{
- StringSink ssink;
- auto sink = makeDecompressionSink(method, ssink);
- (*sink)(in);
- sink->finish();
- return std::move(ssink.s);
+ StringSource src{in};
+ auto filter = makeDecompressionSource(method, src);
+ return filter->drain();
}
std::unique_ptr<FinishSink> makeDecompressionSink(const std::string & method, Sink & nextSink)
@@ -202,7 +202,19 @@ std::unique_ptr<FinishSink> makeDecompressionSink(const std::string & method, Si
if (method == "none" || method == "")
return std::make_unique<NoneSink>(nextSink);
else if (method == "br")
- return std::make_unique<BrotliDecompressionSink>(nextSink);
+ return sourceToSink([&](Source & source) {
+ BrotliDecompressionSource wrapped{source};
+ wrapped.drainInto(nextSink);
+ // special handling because sourceToSink is screwy: try
+ // to read the source one final time and fail when that
+ // succeeds (to reject trailing garbage in input data).
+ try {
+ char buf;
+ source(&buf, 1);
+ throw Error("garbage at end of brotli stream detected");
+ } catch (EndOfFile &) {
+ }
+ });
else
return sourceToSink([&](Source & source) {
auto decompressionSource = std::make_unique<ArchiveDecompressionSource>(source);
@@ -210,6 +222,19 @@ std::unique_ptr<FinishSink> makeDecompressionSink(const std::string & method, Si
});
}
+std::unique_ptr<Source> makeDecompressionSource(const std::string & method, Source & inner)
+{
+ if (method == "none" || method == "") {
+ return std::make_unique<LambdaSource>([&](char * data, size_t len) {
+ return inner.read(data, len);
+ });
+ } else if (method == "br") {
+ return std::make_unique<BrotliDecompressionSource>(inner);
+ } else {
+ return std::make_unique<ArchiveDecompressionSource>(inner);
+ }
+}
+
struct BrotliCompressionSink : ChunkedCompressionSink
{
Sink & nextSink;
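
A short usage sketch of the new source-based decompression path, mirroring the rewritten decompress() helper above; the input data and the helper that produces it are illustrative, not part of the diff:

std::string brotliData = loadCompressedBlob();  // hypothetical helper returning "br"-compressed bytes
StringSource compressed{brotliData};
auto decompressor = makeDecompressionSource("br", compressed);
// Pulls from `compressed` on demand; throws CompressionError on bad data and
// EndOfFile once the brotli stream is exhausted.
std::string plain = decompressor->drain();
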
diff --git a/src/libutil/compression.hh b/src/libutil/compression.hh
index 4e53a7b3c..8affdddd6 100644
--- a/src/libutil/compression.hh
+++ b/src/libutil/compression.hh
@@ -19,6 +19,7 @@ struct CompressionSink : BufferedSink, FinishSink
std::string decompress(const std::string & method, std::string_view in);
std::unique_ptr<FinishSink> makeDecompressionSink(const std::string & method, Sink & nextSink);
+std::unique_ptr<Source> makeDecompressionSource(const std::string & method, Source & inner);
std::string compress(const std::string & method, std::string_view in, const bool parallel = false, int level = -1);
diff --git a/src/libutil/error.hh b/src/libutil/error.hh
index 0884f9f32..06dfba0df 100644
--- a/src/libutil/error.hh
+++ b/src/libutil/error.hh
@@ -45,6 +45,8 @@ typedef enum {
lvlVomit
} Verbosity;
+Verbosity verbosityFromIntClamped(int val);
+
/**
* The lines of code surrounding an error.
*/
diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc
index d573b22b4..f51f3c092 100644
--- a/src/libutil/file-system.cc
+++ b/src/libutil/file-system.cc
@@ -289,12 +289,17 @@ std::string readFile(const Path & path)
}
-void readFile(const Path & path, Sink & sink)
+box_ptr<Source> readFileSource(const Path & path)
{
AutoCloseFD fd{open(path.c_str(), O_RDONLY | O_CLOEXEC)};
if (!fd)
throw SysError("opening file '%s'", path);
- drainFD(fd.get(), sink);
+
+ struct FileSource : FdSource {
+ AutoCloseFD fd;
+ explicit FileSource(AutoCloseFD fd) : FdSource(fd.get()), fd(std::move(fd)) {}
+ };
+ return make_box_ptr<FileSource>(std::move(fd));
}
diff --git a/src/libutil/file-system.hh b/src/libutil/file-system.hh
index 6c1923d55..64d884227 100644
--- a/src/libutil/file-system.hh
+++ b/src/libutil/file-system.hh
@@ -5,6 +5,7 @@
* Utiltities for working with the file sytem and file paths.
*/
+#include "box_ptr.hh"
#include "types.hh"
#include "file-descriptor.hh"
@@ -142,7 +143,7 @@ unsigned char getFileType(const Path & path);
* Read the contents of a file into a string.
*/
std::string readFile(const Path & path);
-void readFile(const Path & path, Sink & sink);
+box_ptr<Source> readFileSource(const Path & path);
/**
* Write a string to a file.
diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc
index 006b5000c..822fa150e 100644
--- a/src/libutil/hash.cc
+++ b/src/libutil/hash.cc
@@ -324,7 +324,7 @@ Hash hashString(HashType ht, std::string_view s)
Hash hashFile(HashType ht, const Path & path)
{
HashSink sink(ht);
- readFile(path, sink);
+ readFileSource(path)->drainInto(sink);
return sink.finish().first;
}
diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc
index b01bb4dd4..fecbc89ac 100644
--- a/src/libutil/logging.cc
+++ b/src/libutil/logging.cc
@@ -5,6 +5,7 @@
#include "position.hh"
#include "terminal.hh"
+#include <algorithm>
#include <atomic>
#include <sstream>
#include <nlohmann/json.hpp>
@@ -110,6 +111,12 @@ public:
Verbosity verbosity = lvlInfo;
+Verbosity verbosityFromIntClamped(int val)
+{
+ int clamped = std::clamp(val, int(lvlError), int(lvlVomit));
+ return static_cast<Verbosity>(clamped);
+}
+
void writeToStderr(std::string_view s)
{
try {
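
Illustrative behaviour of the new clamping helper, a sketch using the Verbosity bounds declared in error.hh:

// -v past the maximum stays at lvlVomit; --quiet below the minimum stays at lvlError.
assert(verbosityFromIntClamped(int(lvlVomit) + 1) == lvlVomit);
assert(verbosityFromIntClamped(int(lvlError) - 1) == lvlError);
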
diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc
index 39e5cc99d..7dbbbcc56 100644
--- a/src/nix/add-to-store.cc
+++ b/src/nix/add-to-store.cc
@@ -37,7 +37,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
Hash hash = narHash;
if (ingestionMethod == FileIngestionMethod::Flat) {
HashSink hsink(htSHA256);
- readFile(path, hsink);
+ readFileSource(path)->drainInto(hsink);
hash = hsink.finish().first;
}
diff --git a/src/nix/hash.cc b/src/nix/hash.cc
index 9feca9345..66c5516e7 100644
--- a/src/nix/hash.cc
+++ b/src/nix/hash.cc
@@ -85,7 +85,7 @@ struct CmdHashBase : Command
switch (mode) {
case FileIngestionMethod::Flat:
- readFile(path, *hashSink);
+ readFileSource(path)->drainInto(*hashSink);
break;
case FileIngestionMethod::Recursive:
dumpPath(path, *hashSink);
diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc
index 2457e4cc8..cad70e726 100644
--- a/src/nix/prefetch.cc
+++ b/src/nix/prefetch.cc
@@ -98,7 +98,7 @@ std::tuple<StorePath, Hash> prefetchFile(
FdSink sink(fd.get());
FileTransferRequest req(url);
- getFileTransfer()->download(std::move(req), sink);
+ getFileTransfer()->download(std::move(req))->drainInto(sink);
}
/* Optionally unpack the file. */
diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc
index cbc28fdd7..c7f31f3fb 100644
--- a/src/nix/upgrade-nix.cc
+++ b/src/nix/upgrade-nix.cc
@@ -286,7 +286,7 @@ struct CmdUpgradeNix : MixDryRun, EvalCommand
// FIXME: use nixos.org?
auto req = FileTransferRequest(storePathsUrl);
- auto res = getFileTransfer()->download(req);
+ auto res = getFileTransfer()->transfer(req);
auto state = std::make_unique<EvalState>(SearchPath{}, store);
auto v = state->allocValue();