Diffstat (limited to 'src')
-rw-r--r--  src/libexpr/common-eval-args.cc | 2
-rw-r--r--  src/libexpr/eval.cc | 4
-rw-r--r--  src/libexpr/nixexpr.hh | 6
-rw-r--r--  src/libexpr/parser.y | 10
-rw-r--r--  src/libexpr/primops.cc | 7
-rw-r--r--  src/libexpr/primops/fetchTree.cc | 2
-rw-r--r--  src/libfetchers/github.cc | 2
-rw-r--r--  src/libfetchers/tarball.cc | 10
-rw-r--r--  src/libstore/build.cc | 6
-rw-r--r--  src/libstore/builtins/fetchurl.cc | 10
-rw-r--r--  src/libstore/filetransfer.cc (renamed from src/libstore/download.cc) | 140
-rw-r--r--  src/libstore/filetransfer.hh (renamed from src/libstore/download.hh) | 47
-rw-r--r--  src/libstore/http-binary-cache-store.cc | 35
-rw-r--r--  src/libstore/s3-binary-cache-store.cc | 6
-rw-r--r--  src/libstore/s3.hh | 4
-rw-r--r--  src/libutil/logging.cc | 2
-rw-r--r--  src/libutil/logging.hh | 2
-rwxr-xr-x  src/nix-channel/nix-channel.cc | 4
-rw-r--r--  src/nix-prefetch-url/nix-prefetch-url.cc | 6
-rw-r--r--  src/nix/main.cc | 10
-rw-r--r--  src/nix/progress-bar.cc | 6
-rw-r--r--  src/nix/run.cc | 4
-rw-r--r--  src/nix/upgrade-nix.cc | 6
23 files changed, 175 insertions, 156 deletions
diff --git a/src/libexpr/common-eval-args.cc b/src/libexpr/common-eval-args.cc
index f965c194f..47a57f91c 100644
--- a/src/libexpr/common-eval-args.cc
+++ b/src/libexpr/common-eval-args.cc
@@ -1,6 +1,6 @@
#include "common-eval-args.hh"
#include "shared.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "util.hh"
#include "eval.hh"
#include "fetchers.hh"
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 6fcb2917c..c5945adf0 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -5,7 +5,7 @@
#include "derivations.hh"
#include "globals.hh"
#include "eval-inline.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "json.hh"
#include "function-trace.hh"
#include "flake/flake.hh"
@@ -1296,7 +1296,7 @@ void ExprWith::eval(EvalState & state, Env & env, Value & v)
void ExprIf::eval(EvalState & state, Env & env, Value & v)
{
- (state.evalBool(env, cond) ? then : else_)->eval(state, env, v);
+ (state.evalBool(env, cond, pos) ? then : else_)->eval(state, env, v);
}
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index f7e9105a4..25798cac6 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -209,9 +209,10 @@ struct ExprList : Expr
struct Formal
{
+ Pos pos;
Symbol name;
Expr * def;
- Formal(const Symbol & name, Expr * def) : name(name), def(def) { };
+ Formal(const Pos & pos, const Symbol & name, Expr * def) : pos(pos), name(name), def(def) { };
};
struct Formals
@@ -261,8 +262,9 @@ struct ExprWith : Expr
struct ExprIf : Expr
{
+ Pos pos;
Expr * cond, * then, * else_;
- ExprIf(Expr * cond, Expr * then, Expr * else_) : cond(cond), then(then), else_(else_) { };
+ ExprIf(const Pos & pos, Expr * cond, Expr * then, Expr * else_) : pos(pos), cond(cond), then(then), else_(else_) { };
COMMON_METHODS
};
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index a30fb44b5..1993fa6c1 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -335,7 +335,7 @@ expr_function
;
expr_if
- : IF expr THEN expr ELSE expr { $$ = new ExprIf($2, $4, $6); }
+ : IF expr THEN expr ELSE expr { $$ = new ExprIf(CUR_POS, $2, $4, $6); }
| expr_op
;
@@ -531,8 +531,8 @@ formals
;
formal
- : ID { $$ = new Formal(data->symbols.create($1), 0); }
- | ID '?' expr { $$ = new Formal(data->symbols.create($1), $3); }
+ : ID { $$ = new Formal(CUR_POS, data->symbols.create($1), 0); }
+ | ID '?' expr { $$ = new Formal(CUR_POS, data->symbols.create($1), $3); }
;
%%
@@ -544,7 +544,7 @@ formal
#include <unistd.h>
#include "eval.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "fetchers.hh"
#include "store-api.hh"
@@ -690,7 +690,7 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
try {
res = { true, store->toRealPath(fetchers::downloadTarball(
store, resolveUri(elem.second), "source", false).storePath) };
- } catch (DownloadError & e) {
+ } catch (FileTransferError & e) {
printError(format("warning: Nix search path entry '%1%' cannot be downloaded, ignoring") % elem.second);
res = { false, "" };
}
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index fb7d5497b..ed0a97757 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -1354,9 +1354,12 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
}
state.mkAttrs(v, args[0]->lambda.fun->formals->formals.size());
- for (auto & i : args[0]->lambda.fun->formals->formals)
+ for (auto & i : args[0]->lambda.fun->formals->formals) {
// !!! should optimise booleans (allocate only once)
- mkBool(*state.allocAttr(v, i.name), i.def);
+ Value * value = state.allocValue();
+ v.attrs->push_back(Attr(i.name, value, &i.pos));
+ mkBool(*value, i.def);
+ }
v.attrs->sort();
}
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index 4f66fc6c1..9d1f5b8d4 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -2,7 +2,7 @@
#include "eval-inline.hh"
#include "store-api.hh"
#include "fetchers.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "registry.hh"
#include <ctime>
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index f50c927eb..c01917dcc 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -1,4 +1,4 @@
-#include "download.hh"
+#include "filetransfer.hh"
#include "cache.hh"
#include "fetchers.hh"
#include "globals.hh"
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index 4c4e5828e..695525b31 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -1,6 +1,6 @@
#include "fetchers.hh"
#include "cache.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "globals.hh"
#include "store-api.hh"
#include "archive.hh"
@@ -36,13 +36,13 @@ DownloadFileResult downloadFile(
if (cached && !cached->expired)
return useCached();
- DownloadRequest request(url);
+ FileTransferRequest request(url);
if (cached)
request.expectedETag = getStrAttr(cached->infoAttrs, "etag");
- DownloadResult res;
+ FileTransferResult res;
try {
- res = getDownloader()->download(request);
- } catch (DownloadError & e) {
+ res = getFileTransfer()->download(request);
+ } catch (FileTransferError & e) {
if (cached) {
warn("%s; using cached version", e.msg());
return useCached();
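downloadFile keeps its caching behaviour under the new names: the cached ETag is sent along with the request, and a failed transfer falls back to the cached copy with a warning. A minimal sketch of that pattern against the filetransfer.hh interface from this diff, assuming nix's usual headers (warn() is nix's logging helper); the cachedETag/useCached parameters are illustrative stand-ins for the cache layer in tarball.cc:

#include "filetransfer.hh"   // FileTransferRequest, FileTransferResult, getFileTransfer

#include <functional>
#include <optional>
#include <string>

// Sketch: revalidate a cached download via its ETag, and fall back to the
// cached copy if the transfer fails.
FileTransferResult fetchWithCacheFallback(
    const std::string & url,
    const std::optional<std::string> & cachedETag,
    std::function<FileTransferResult()> useCached)
{
    FileTransferRequest request(url);
    if (cachedETag)
        request.expectedETag = *cachedETag;   // lets the server answer "not modified"

    try {
        return getFileTransfer()->download(request);
    } catch (FileTransferError & e) {
        if (useCached) {
            warn("%s; using cached version", e.msg());   // same message as the hunk above
            return useCached();
        }
        throw;
    }
}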
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index 527d7ac42..0febb8dfb 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -7,7 +7,7 @@
#include "affinity.hh"
#include "builtins.hh"
#include "builtins/buildenv.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "finally.hh"
#include "compression.hh"
#include "json.hh"
@@ -361,7 +361,7 @@ public:
{
actDerivations.progress(doneBuilds, expectedBuilds + doneBuilds, runningBuilds, failedBuilds);
actSubstitutions.progress(doneSubstitutions, expectedSubstitutions + doneSubstitutions, runningSubstitutions, failedSubstitutions);
- act.setExpected(actDownload, expectedDownloadSize + doneDownloadSize);
+ act.setExpected(actFileTransfer, expectedDownloadSize + doneDownloadSize);
act.setExpected(actCopyPath, expectedNarSize + doneNarSize);
}
};
@@ -2161,7 +2161,7 @@ void DerivationGoal::startBuilder()
if (needsHashRewrite()) {
if (pathExists(homeDir))
- throw Error(format("directory '%1%' exists; please remove it") % homeDir);
+ throw Error(format("home directory '%1%' exists; please remove it to assure purity of builds without sandboxing") % homeDir);
/* We're not doing a chroot build, but we have some valid
output paths. Since we can't just overwrite or delete
diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc
index f6ae5d2e6..486babf14 100644
--- a/src/libstore/builtins/fetchurl.cc
+++ b/src/libstore/builtins/fetchurl.cc
@@ -1,5 +1,5 @@
#include "builtins.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "store-api.hh"
#include "archive.hh"
#include "compression.hh"
@@ -26,9 +26,9 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
auto mainUrl = getAttr("url");
bool unpack = get(drv.env, "unpack").value_or("") == "1";
- /* Note: have to use a fresh downloader here because we're in
+ /* Note: have to use a fresh fileTransfer here because we're in
a forked process. */
- auto downloader = makeDownloader();
+ auto fileTransfer = makeFileTransfer();
auto fetch = [&](const std::string & url) {
@@ -36,13 +36,13 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
/* No need to do TLS verification, because we check the hash of
the result anyway. */
- DownloadRequest request(url);
+ FileTransferRequest request(url);
request.verifyTLS = false;
request.decompress = false;
auto decompressor = makeDecompressionSink(
unpack && hasSuffix(mainUrl, ".xz") ? "xz" : "none", sink);
- downloader->download(std::move(request), *decompressor);
+ fileTransfer->download(std::move(request), *decompressor);
decompressor->finish();
});
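As the updated comment says, builtinFetchurl runs in a forked builder, so it cannot reuse the shared transfer object and its worker thread. A minimal sketch of the distinction, using only names introduced in this diff (the helper functions are hypothetical):

#include "filetransfer.hh"   // makeFileTransfer, getFileTransfer, FileTransferRequest

#include <string>
#include <utility>

// After fork(), the shared instance's curl multi handle and worker thread are
// not usable, so the builtin creates a private FileTransfer object.
void fetchInForkedBuilder(const std::string & url, Sink & sink)
{
    auto fileTransfer = makeFileTransfer();   // fresh instance, safe in the child

    FileTransferRequest request(url);
    request.verifyTLS = false;    // the output hash is checked afterwards anyway
    request.decompress = false;   // keep the served bytes unmodified

    fileTransfer->download(std::move(request), sink);
}

// Everywhere else the shared instance is preferred: it reuses connections and
// multiplexes requests over HTTP/2.
void fetchNormally(const std::string & url, Sink & sink)
{
    getFileTransfer()->download(FileTransferRequest(url), sink);
}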
diff --git a/src/libstore/download.cc b/src/libstore/filetransfer.cc
index af69699a8..e9684b3d4 100644
--- a/src/libstore/download.cc
+++ b/src/libstore/filetransfer.cc
@@ -1,4 +1,4 @@
-#include "download.hh"
+#include "filetransfer.hh"
#include "util.hh"
#include "globals.hh"
#include "store-api.hh"
@@ -27,9 +27,9 @@ using namespace std::string_literals;
namespace nix {
-DownloadSettings downloadSettings;
+FileTransferSettings fileTransferSettings;
-static GlobalConfig::Register r1(&downloadSettings);
+static GlobalConfig::Register r1(&fileTransferSettings);
std::string resolveUri(const std::string & uri)
{
@@ -39,21 +39,21 @@ std::string resolveUri(const std::string & uri)
return uri;
}
-struct CurlDownloader : public Downloader
+struct curlFileTransfer : public FileTransfer
{
CURLM * curlm = 0;
std::random_device rd;
std::mt19937 mt19937;
- struct DownloadItem : public std::enable_shared_from_this<DownloadItem>
+ struct TransferItem : public std::enable_shared_from_this<TransferItem>
{
- CurlDownloader & downloader;
- DownloadRequest request;
- DownloadResult result;
+ curlFileTransfer & fileTransfer;
+ FileTransferRequest request;
+ FileTransferResult result;
Activity act;
bool done = false; // whether either the success or failure function has been called
- Callback<DownloadResult> callback;
+ Callback<FileTransferResult> callback;
CURL * req = 0;
bool active = false; // whether the handle has been added to the multi object
std::string status;
@@ -72,19 +72,26 @@ struct CurlDownloader : public Downloader
curl_off_t writtenToSink = 0;
- DownloadItem(CurlDownloader & downloader,
- const DownloadRequest & request,
- Callback<DownloadResult> && callback)
- : downloader(downloader)
+ TransferItem(curlFileTransfer & fileTransfer,
+ const FileTransferRequest & request,
+ Callback<FileTransferResult> && callback)
+ : fileTransfer(fileTransfer)
, request(request)
- , act(*logger, lvlTalkative, actDownload,
+ , act(*logger, lvlTalkative, actFileTransfer,
fmt(request.data ? "uploading '%s'" : "downloading '%s'", request.uri),
{request.uri}, request.parentAct)
, callback(std::move(callback))
, finalSink([this](const unsigned char * data, size_t len) {
if (this->request.dataCallback) {
- writtenToSink += len;
- this->request.dataCallback((char *) data, len);
+ long httpStatus = 0;
+ curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
+
+ /* Only write data to the sink if this is a
+ successful response. */
+ if (httpStatus == 0 || httpStatus == 200 || httpStatus == 201 || httpStatus == 206) {
+ writtenToSink += len;
+ this->request.dataCallback((char *) data, len);
+ }
} else
this->result.data->append((char *) data, len);
})
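The new check stops error bodies (for example an HTML 404 page) from being streamed into the caller's data callback; a status of 0 covers transports or moments where curl has no HTTP response code yet (e.g. file://). A self-contained sketch of the same predicate:

#include <cassert>

// Acceptance test added above: data is forwarded to the sink only for
// "successful" responses. 0 = no HTTP status (non-HTTP transport or not yet
// received), 200 = OK, 201 = Created, 206 = Partial Content (resumed download).
static bool shouldForwardToSink(long httpStatus)
{
    return httpStatus == 0 || httpStatus == 200
        || httpStatus == 201 || httpStatus == 206;
}

int main()
{
    assert(shouldForwardToSink(0));     // no status yet / non-HTTP transport
    assert(shouldForwardToSink(206));   // resuming an interrupted transfer
    assert(!shouldForwardToSink(404));  // error body must not reach the sink
    assert(!shouldForwardToSink(500));
    return 0;
}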
@@ -95,17 +102,17 @@ struct CurlDownloader : public Downloader
requestHeaders = curl_slist_append(requestHeaders, ("Content-Type: " + request.mimeType).c_str());
}
- ~DownloadItem()
+ ~TransferItem()
{
if (req) {
if (active)
- curl_multi_remove_handle(downloader.curlm, req);
+ curl_multi_remove_handle(fileTransfer.curlm, req);
curl_easy_cleanup(req);
}
if (requestHeaders) curl_slist_free_all(requestHeaders);
try {
if (!done)
- fail(DownloadError(Interrupted, format("download of '%s' was interrupted") % request.uri));
+ fail(FileTransferError(Interrupted, format("download of '%s' was interrupted") % request.uri));
} catch (...) {
ignoreException();
}
@@ -149,7 +156,7 @@ struct CurlDownloader : public Downloader
static size_t writeCallbackWrapper(void * contents, size_t size, size_t nmemb, void * userp)
{
- return ((DownloadItem *) userp)->writeCallback(contents, size, nmemb);
+ return ((TransferItem *) userp)->writeCallback(contents, size, nmemb);
}
size_t headerCallback(void * contents, size_t size, size_t nmemb)
@@ -191,7 +198,7 @@ struct CurlDownloader : public Downloader
static size_t headerCallbackWrapper(void * contents, size_t size, size_t nmemb, void * userp)
{
- return ((DownloadItem *) userp)->headerCallback(contents, size, nmemb);
+ return ((TransferItem *) userp)->headerCallback(contents, size, nmemb);
}
int progressCallback(double dltotal, double dlnow)
@@ -206,7 +213,7 @@ struct CurlDownloader : public Downloader
static int progressCallbackWrapper(void * userp, double dltotal, double dlnow, double ultotal, double ulnow)
{
- return ((DownloadItem *) userp)->progressCallback(dltotal, dlnow);
+ return ((TransferItem *) userp)->progressCallback(dltotal, dlnow);
}
static int debugCallback(CURL * handle, curl_infotype type, char * data, size_t size, void * userptr)
@@ -230,7 +237,7 @@ struct CurlDownloader : public Downloader
static size_t readCallbackWrapper(char *buffer, size_t size, size_t nitems, void * userp)
{
- return ((DownloadItem *) userp)->readCallback(buffer, size, nitems);
+ return ((TransferItem *) userp)->readCallback(buffer, size, nitems);
}
void init()
@@ -241,7 +248,7 @@ struct CurlDownloader : public Downloader
if (verbosity >= lvlVomit) {
curl_easy_setopt(req, CURLOPT_VERBOSE, 1);
- curl_easy_setopt(req, CURLOPT_DEBUGFUNCTION, DownloadItem::debugCallback);
+ curl_easy_setopt(req, CURLOPT_DEBUGFUNCTION, TransferItem::debugCallback);
}
curl_easy_setopt(req, CURLOPT_URL, request.uri.c_str());
@@ -250,19 +257,19 @@ struct CurlDownloader : public Downloader
curl_easy_setopt(req, CURLOPT_NOSIGNAL, 1);
curl_easy_setopt(req, CURLOPT_USERAGENT,
("curl/" LIBCURL_VERSION " Nix/" + nixVersion +
- (downloadSettings.userAgentSuffix != "" ? " " + downloadSettings.userAgentSuffix.get() : "")).c_str());
+ (fileTransferSettings.userAgentSuffix != "" ? " " + fileTransferSettings.userAgentSuffix.get() : "")).c_str());
#if LIBCURL_VERSION_NUM >= 0x072b00
curl_easy_setopt(req, CURLOPT_PIPEWAIT, 1);
#endif
#if LIBCURL_VERSION_NUM >= 0x072f00
- if (downloadSettings.enableHttp2)
+ if (fileTransferSettings.enableHttp2)
curl_easy_setopt(req, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2TLS);
else
curl_easy_setopt(req, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_1);
#endif
- curl_easy_setopt(req, CURLOPT_WRITEFUNCTION, DownloadItem::writeCallbackWrapper);
+ curl_easy_setopt(req, CURLOPT_WRITEFUNCTION, TransferItem::writeCallbackWrapper);
curl_easy_setopt(req, CURLOPT_WRITEDATA, this);
- curl_easy_setopt(req, CURLOPT_HEADERFUNCTION, DownloadItem::headerCallbackWrapper);
+ curl_easy_setopt(req, CURLOPT_HEADERFUNCTION, TransferItem::headerCallbackWrapper);
curl_easy_setopt(req, CURLOPT_HEADERDATA, this);
curl_easy_setopt(req, CURLOPT_PROGRESSFUNCTION, progressCallbackWrapper);
@@ -290,10 +297,10 @@ struct CurlDownloader : public Downloader
curl_easy_setopt(req, CURLOPT_SSL_VERIFYHOST, 0);
}
- curl_easy_setopt(req, CURLOPT_CONNECTTIMEOUT, downloadSettings.connectTimeout.get());
+ curl_easy_setopt(req, CURLOPT_CONNECTTIMEOUT, fileTransferSettings.connectTimeout.get());
curl_easy_setopt(req, CURLOPT_LOW_SPEED_LIMIT, 1L);
- curl_easy_setopt(req, CURLOPT_LOW_SPEED_TIME, downloadSettings.stalledDownloadTimeout.get());
+ curl_easy_setopt(req, CURLOPT_LOW_SPEED_TIME, fileTransferSettings.stalledDownloadTimeout.get());
/* If no file exist in the specified path, curl continues to work
anyway as if netrc support was disabled. */
@@ -394,14 +401,14 @@ struct CurlDownloader : public Downloader
auto exc =
code == CURLE_ABORTED_BY_CALLBACK && _isInterrupted
- ? DownloadError(Interrupted, fmt("%s of '%s' was interrupted", request.verb(), request.uri))
+ ? FileTransferError(Interrupted, fmt("%s of '%s' was interrupted", request.verb(), request.uri))
: httpStatus != 0
- ? DownloadError(err,
+ ? FileTransferError(err,
fmt("unable to %s '%s': HTTP error %d",
request.verb(), request.uri, httpStatus)
+ (code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code)))
)
- : DownloadError(err,
+ : FileTransferError(err,
fmt("unable to %s '%s': %s (%d)",
request.verb(), request.uri, curl_easy_strerror(code), code));
@@ -415,13 +422,13 @@ struct CurlDownloader : public Downloader
|| writtenToSink == 0
|| (acceptRanges && encoding.empty())))
{
- int ms = request.baseRetryTimeMs * std::pow(2.0f, attempt - 1 + std::uniform_real_distribution<>(0.0, 0.5)(downloader.mt19937));
+ int ms = request.baseRetryTimeMs * std::pow(2.0f, attempt - 1 + std::uniform_real_distribution<>(0.0, 0.5)(fileTransfer.mt19937));
if (writtenToSink)
warn("%s; retrying from offset %d in %d ms", exc.what(), writtenToSink, ms);
else
warn("%s; retrying in %d ms", exc.what(), ms);
embargo = std::chrono::steady_clock::now() + std::chrono::milliseconds(ms);
- downloader.enqueueItem(shared_from_this());
+ fileTransfer.enqueueItem(shared_from_this());
}
else
fail(exc);
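The retry delay is unchanged apart from the rename: exponential backoff with up to half a doubling of random jitter, drawn from the transfer object's Mersenne Twister so that parallel transfers do not retry in lock step. A self-contained sketch of the schedule, using the default baseRetryTimeMs of 250 from filetransfer.hh:

#include <cmath>
#include <cstdio>
#include <random>

int main()
{
    std::mt19937 mt19937{std::random_device{}()};
    const unsigned int baseRetryTimeMs = 250;   // default in FileTransferRequest

    // Same formula as the hunk above: base * 2^(attempt - 1 + jitter),
    // with jitter drawn uniformly from [0, 0.5).
    for (unsigned int attempt = 1; attempt <= 5; ++attempt) {
        double jitter = std::uniform_real_distribution<>(0.0, 0.5)(mt19937);
        int ms = baseRetryTimeMs * std::pow(2.0f, attempt - 1 + jitter);
        std::printf("attempt %u: retry in %d ms\n", attempt, ms);
    }
    return 0;
}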
@@ -432,12 +439,12 @@ struct CurlDownloader : public Downloader
struct State
{
struct EmbargoComparator {
- bool operator() (const std::shared_ptr<DownloadItem> & i1, const std::shared_ptr<DownloadItem> & i2) {
+ bool operator() (const std::shared_ptr<TransferItem> & i1, const std::shared_ptr<TransferItem> & i2) {
return i1->embargo > i2->embargo;
}
};
bool quit = false;
- std::priority_queue<std::shared_ptr<DownloadItem>, std::vector<std::shared_ptr<DownloadItem>>, EmbargoComparator> incoming;
+ std::priority_queue<std::shared_ptr<TransferItem>, std::vector<std::shared_ptr<TransferItem>>, EmbargoComparator> incoming;
};
Sync<State> state_;
@@ -449,7 +456,7 @@ struct CurlDownloader : public Downloader
std::thread workerThread;
- CurlDownloader()
+ curlFileTransfer()
: mt19937(rd())
{
static std::once_flag globalInit;
@@ -462,7 +469,7 @@ struct CurlDownloader : public Downloader
#endif
#if LIBCURL_VERSION_NUM >= 0x071e00 // Max connections requires >= 7.30.0
curl_multi_setopt(curlm, CURLMOPT_MAX_TOTAL_CONNECTIONS,
- downloadSettings.httpConnections.get());
+ fileTransferSettings.httpConnections.get());
#endif
wakeupPipe.create();
@@ -471,7 +478,7 @@ struct CurlDownloader : public Downloader
workerThread = std::thread([&]() { workerThreadEntry(); });
}
- ~CurlDownloader()
+ ~curlFileTransfer()
{
stopWorkerThread();
@@ -497,7 +504,7 @@ struct CurlDownloader : public Downloader
stopWorkerThread();
});
- std::map<CURL *, std::shared_ptr<DownloadItem>> items;
+ std::map<CURL *, std::shared_ptr<TransferItem>> items;
bool quit = false;
@@ -554,7 +561,7 @@ struct CurlDownloader : public Downloader
throw SysError("reading curl wakeup socket");
}
- std::vector<std::shared_ptr<DownloadItem>> incoming;
+ std::vector<std::shared_ptr<TransferItem>> incoming;
auto now = std::chrono::steady_clock::now();
{
@@ -602,7 +609,7 @@ struct CurlDownloader : public Downloader
}
}
- void enqueueItem(std::shared_ptr<DownloadItem> item)
+ void enqueueItem(std::shared_ptr<TransferItem> item)
{
if (item->request.data
&& !hasPrefix(item->request.uri, "http://")
@@ -634,8 +641,8 @@ struct CurlDownloader : public Downloader
}
#endif
- void enqueueDownload(const DownloadRequest & request,
- Callback<DownloadResult> callback) override
+ void enqueueFileTransfer(const FileTransferRequest & request,
+ Callback<FileTransferResult> callback) override
{
/* Ugly hack to support s3:// URIs. */
if (hasPrefix(request.uri, "s3://")) {
@@ -653,9 +660,9 @@ struct CurlDownloader : public Downloader
// FIXME: implement ETag
auto s3Res = s3Helper.getObject(bucketName, key);
- DownloadResult res;
+ FileTransferResult res;
if (!s3Res.data)
- throw DownloadError(NotFound, fmt("S3 object '%s' does not exist", request.uri));
+ throw FileTransferError(NotFound, fmt("S3 object '%s' does not exist", request.uri));
res.data = s3Res.data;
callback(std::move(res));
#else
@@ -665,26 +672,26 @@ struct CurlDownloader : public Downloader
return;
}
- enqueueItem(std::make_shared<DownloadItem>(*this, request, std::move(callback)));
+ enqueueItem(std::make_shared<TransferItem>(*this, request, std::move(callback)));
}
};
-ref<Downloader> getDownloader()
+ref<FileTransfer> getFileTransfer()
{
- static ref<Downloader> downloader = makeDownloader();
- return downloader;
+ static ref<FileTransfer> fileTransfer = makeFileTransfer();
+ return fileTransfer;
}
-ref<Downloader> makeDownloader()
+ref<FileTransfer> makeFileTransfer()
{
- return make_ref<CurlDownloader>();
+ return make_ref<curlFileTransfer>();
}
-std::future<DownloadResult> Downloader::enqueueDownload(const DownloadRequest & request)
+std::future<FileTransferResult> FileTransfer::enqueueFileTransfer(const FileTransferRequest & request)
{
- auto promise = std::make_shared<std::promise<DownloadResult>>();
- enqueueDownload(request,
- {[promise](std::future<DownloadResult> fut) {
+ auto promise = std::make_shared<std::promise<FileTransferResult>>();
+ enqueueFileTransfer(request,
+ {[promise](std::future<FileTransferResult> fut) {
try {
promise->set_value(fut.get());
} catch (...) {
@@ -694,15 +701,21 @@ std::future<DownloadResult> Downloader::enqueueDownload(const DownloadRequest &
return promise->get_future();
}
-DownloadResult Downloader::download(const DownloadRequest & request)
+FileTransferResult FileTransfer::download(const FileTransferRequest & request)
{
- return enqueueDownload(request).get();
+ return enqueueFileTransfer(request).get();
}
-void Downloader::download(DownloadRequest && request, Sink & sink)
+FileTransferResult FileTransfer::upload(const FileTransferRequest & request)
+{
+ /* Note: this method is the same as download, but helps in readability */
+ return enqueueFileTransfer(request).get();
+}
+
+void FileTransfer::download(FileTransferRequest && request, Sink & sink)
{
/* Note: we can't call 'sink' via request.dataCallback, because
- that would cause the sink to execute on the downloader
+ that would cause the sink to execute on the fileTransfer
thread. If 'sink' is a coroutine, this will fail. Also, if the
sink is expensive (e.g. one that does decompression and writing
to the Nix store), it would stall the download thread too much.
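This hunk makes the wrappers explicit: the synchronous download() and the new upload() are the same one-liner over enqueueFileTransfer(), which bridges the callback-based entry point into a std::future through a shared std::promise. A self-contained sketch of that bridge pattern (AsyncApi is an illustrative stand-in, not part of this diff):

#include <functional>
#include <future>
#include <iostream>
#include <memory>
#include <string>
#include <utility>

// Stand-in for a callback-based API such as
// FileTransfer::enqueueFileTransfer(request, callback).
struct AsyncApi
{
    void enqueue(const std::string & uri, std::function<void(std::string)> callback)
    {
        // A real implementation would invoke this from a worker thread;
        // calling it inline keeps the sketch self-contained.
        callback("contents of " + uri);
    }
};

// Adapt the callback API to a std::future so callers can simply block on .get(),
// mirroring the promise-based overload of enqueueFileTransfer() above.
std::future<std::string> enqueueAsFuture(AsyncApi & api, const std::string & uri)
{
    auto promise = std::make_shared<std::promise<std::string>>();
    api.enqueue(uri, [promise](std::string result) {
        try {
            promise->set_value(std::move(result));
        } catch (...) {
            promise->set_exception(std::current_exception());
        }
    });
    return promise->get_future();
}

int main()
{
    AsyncApi api;
    // download() and upload() are essentially .get() on a future like this one.
    std::cout << enqueueAsFuture(api, "https://example.org/file").get() << "\n";
    return 0;
}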
@@ -748,8 +761,8 @@ void Downloader::download(DownloadRequest && request, Sink & sink)
state->avail.notify_one();
};
- enqueueDownload(request,
- {[_state](std::future<DownloadResult> fut) {
+ enqueueFileTransfer(request,
+ {[_state](std::future<FileTransferResult> fut) {
auto state(_state->lock());
state->quit = true;
try {
@@ -794,7 +807,6 @@ void Downloader::download(DownloadRequest && request, Sink & sink)
}
}
-
bool isUri(const string & s)
{
if (s.compare(0, 8, "channel:") == 0) return true;
diff --git a/src/libstore/download.hh b/src/libstore/filetransfer.hh
index 28c8a9162..2347f363d 100644
--- a/src/libstore/download.hh
+++ b/src/libstore/filetransfer.hh
@@ -9,7 +9,7 @@
namespace nix {
-struct DownloadSettings : Config
+struct FileTransferSettings : Config
{
Setting<bool> enableHttp2{this, true, "http2",
"Whether to enable HTTP/2 support."};
@@ -31,15 +31,15 @@ struct DownloadSettings : Config
"How often Nix will attempt to download a file before giving up."};
};
-extern DownloadSettings downloadSettings;
+extern FileTransferSettings fileTransferSettings;
-struct DownloadRequest
+struct FileTransferRequest
{
std::string uri;
std::string expectedETag;
bool verifyTLS = true;
bool head = false;
- size_t tries = downloadSettings.tries;
+ size_t tries = fileTransferSettings.tries;
unsigned int baseRetryTimeMs = 250;
ActivityId parentAct;
bool decompress = true;
@@ -47,7 +47,7 @@ struct DownloadRequest
std::string mimeType;
std::function<void(char *, size_t)> dataCallback;
- DownloadRequest(const std::string & uri)
+ FileTransferRequest(const std::string & uri)
: uri(uri), parentAct(getCurActivity()) { }
std::string verb()
@@ -56,7 +56,7 @@ struct DownloadRequest
}
};
-struct DownloadResult
+struct FileTransferResult
{
bool cached = false;
std::string etag;
@@ -67,40 +67,43 @@ struct DownloadResult
class Store;
-struct Downloader
+struct FileTransfer
{
- virtual ~Downloader() { }
+ virtual ~FileTransfer() { }
- /* Enqueue a download request, returning a future to the result of
- the download. The future may throw a DownloadError
+ /* Enqueue a data transfer request, returning a future to the result of
+ the download. The future may throw a FileTransferError
exception. */
- virtual void enqueueDownload(const DownloadRequest & request,
- Callback<DownloadResult> callback) = 0;
+ virtual void enqueueFileTransfer(const FileTransferRequest & request,
+ Callback<FileTransferResult> callback) = 0;
- std::future<DownloadResult> enqueueDownload(const DownloadRequest & request);
+ std::future<FileTransferResult> enqueueFileTransfer(const FileTransferRequest & request);
/* Synchronously download a file. */
- DownloadResult download(const DownloadRequest & request);
+ FileTransferResult download(const FileTransferRequest & request);
+
+ /* Synchronously upload a file. */
+ FileTransferResult upload(const FileTransferRequest & request);
/* Download a file, writing its data to a sink. The sink will be
invoked on the thread of the caller. */
- void download(DownloadRequest && request, Sink & sink);
+ void download(FileTransferRequest && request, Sink & sink);
enum Error { NotFound, Forbidden, Misc, Transient, Interrupted };
};
-/* Return a shared Downloader object. Using this object is preferred
+/* Return a shared FileTransfer object. Using this object is preferred
because it enables connection reuse and HTTP/2 multiplexing. */
-ref<Downloader> getDownloader();
+ref<FileTransfer> getFileTransfer();
-/* Return a new Downloader object. */
-ref<Downloader> makeDownloader();
+/* Return a new FileTransfer object. */
+ref<FileTransfer> makeFileTransfer();
-class DownloadError : public Error
+class FileTransferError : public Error
{
public:
- Downloader::Error error;
- DownloadError(Downloader::Error error, const FormatOrString & fs)
+ FileTransfer::Error error;
+ FileTransferError(FileTransfer::Error error, const FormatOrString & fs)
: Error(fs), error(error)
{ }
};
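Taken together, the renamed header keeps the old call patterns: build a FileTransferRequest, hand it to the shared object from getFileTransfer(), and catch FileTransferError on failure. A minimal usage sketch against this interface; URLs, payload and MIME type are illustrative:

#include "filetransfer.hh"   // the interface declared above

#include <memory>
#include <string>

void exampleTransfers()
{
    auto ft = getFileTransfer();   // shared instance: connection reuse and HTTP/2

    // Synchronous download, as in upgrade-nix.cc below.
    FileTransferRequest dl("https://example.org/data.json");
    try {
        FileTransferResult res = ft->download(dl);
        // res.data holds the body, res.etag the ETag, res.cached whether the
        // server reported a supplied expectedETag as still valid.
    } catch (FileTransferError & e) {
        if (e.error != FileTransfer::NotFound)
            throw;
        // ...handle the missing file...
    }

    // Upload: the same request type carrying a payload, as in the HTTP binary
    // cache store below.
    FileTransferRequest ul("https://example.org/cache/some-file");
    ul.data = std::make_shared<std::string>("...payload...");
    ul.mimeType = "text/plain";
    ft->upload(ul);
}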
diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index 011794c62..451a64785 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -1,5 +1,5 @@
#include "binary-cache-store.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "globals.hh"
#include "nar-info-disk-cache.hh"
@@ -85,14 +85,14 @@ protected:
checkEnabled();
try {
- DownloadRequest request(cacheUri + "/" + path);
+ FileTransferRequest request(cacheUri + "/" + path);
request.head = true;
- getDownloader()->download(request);
+ getFileTransfer()->download(request);
return true;
- } catch (DownloadError & e) {
+ } catch (FileTransferError & e) {
/* S3 buckets return 403 if a file doesn't exist and the
bucket is unlistable, so treat 403 as 404. */
- if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden)
+ if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
return false;
maybeDisable();
throw;
@@ -103,19 +103,19 @@ protected:
const std::string & data,
const std::string & mimeType) override
{
- auto req = DownloadRequest(cacheUri + "/" + path);
+ auto req = FileTransferRequest(cacheUri + "/" + path);
req.data = std::make_shared<string>(data); // FIXME: inefficient
req.mimeType = mimeType;
try {
- getDownloader()->download(req);
- } catch (DownloadError & e) {
+ getFileTransfer()->upload(req);
+ } catch (FileTransferError & e) {
throw UploadToHTTP("while uploading to HTTP binary cache at '%s': %s", cacheUri, e.msg());
}
}
- DownloadRequest makeRequest(const std::string & path)
+ FileTransferRequest makeRequest(const std::string & path)
{
- DownloadRequest request(cacheUri + "/" + path);
+ FileTransferRequest request(cacheUri + "/" + path);
return request;
}
@@ -124,9 +124,9 @@ protected:
checkEnabled();
auto request(makeRequest(path));
try {
- getDownloader()->download(std::move(request), sink);
- } catch (DownloadError & e) {
- if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden)
+ getFileTransfer()->download(std::move(request), sink);
+ } catch (FileTransferError & e) {
+ if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache '%s'", path, getUri());
maybeDisable();
throw;
@@ -142,12 +142,12 @@ protected:
auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));
- getDownloader()->enqueueDownload(request,
- {[callbackPtr, this](std::future<DownloadResult> result) {
+ getFileTransfer()->enqueueFileTransfer(request,
+ {[callbackPtr, this](std::future<FileTransferResult> result) {
try {
(*callbackPtr)(result.get().data);
- } catch (DownloadError & e) {
- if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden)
+ } catch (FileTransferError & e) {
+ if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
return (*callbackPtr)(std::shared_ptr<std::string>());
maybeDisable();
callbackPtr->rethrow();
@@ -174,4 +174,3 @@ static RegisterStoreImplementation regStore([](
});
}
-
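The fileExists change keeps the old semantics: probe with a HEAD request and treat 403 like 404, because unlistable S3-backed caches answer Forbidden for missing objects. A sketch of the whole check against the filetransfer.hh interface from this diff (the function name and cacheUri/path handling are simplified):

#include "filetransfer.hh"

#include <string>

// Sketch of HttpBinaryCacheStore::fileExists() after the rename: a HEAD
// request, with both NotFound and Forbidden mapped to "does not exist".
bool fileExistsInCache(const std::string & cacheUri, const std::string & path)
{
    FileTransferRequest request(cacheUri + "/" + path);
    request.head = true;   // only the status line is needed, no body
    try {
        getFileTransfer()->download(request);
        return true;
    } catch (FileTransferError & e) {
        if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
            return false;
        throw;   // other failures (timeouts, TLS, ...) still propagate
    }
}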
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
index f2e4b63e0..b24e7b7d6 100644
--- a/src/libstore/s3-binary-cache-store.cc
+++ b/src/libstore/s3-binary-cache-store.cc
@@ -6,7 +6,7 @@
#include "nar-info-disk-cache.hh"
#include "globals.hh"
#include "compression.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "istringstream_nocopy.hh"
#include <aws/core/Aws.h>
@@ -132,7 +132,7 @@ ref<Aws::Client::ClientConfiguration> S3Helper::makeConfig(const string & region
return res;
}
-S3Helper::DownloadResult S3Helper::getObject(
+S3Helper::FileTransferResult S3Helper::getObject(
const std::string & bucketName, const std::string & key)
{
debug("fetching 's3://%s/%s'...", bucketName, key);
@@ -146,7 +146,7 @@ S3Helper::DownloadResult S3Helper::getObject(
return Aws::New<std::stringstream>("STRINGSTREAM");
});
- DownloadResult res;
+ FileTransferResult res;
auto now1 = std::chrono::steady_clock::now();
diff --git a/src/libstore/s3.hh b/src/libstore/s3.hh
index ef5f23d0f..2042bffcf 100644
--- a/src/libstore/s3.hh
+++ b/src/libstore/s3.hh
@@ -18,13 +18,13 @@ struct S3Helper
ref<Aws::Client::ClientConfiguration> makeConfig(const std::string & region, const std::string & scheme, const std::string & endpoint);
- struct DownloadResult
+ struct FileTransferResult
{
std::shared_ptr<std::string> data;
unsigned int durationMs;
};
- DownloadResult getObject(
+ FileTransferResult getObject(
const std::string & bucketName, const std::string & key);
};
diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc
index fa5c84a27..bb437cf1c 100644
--- a/src/libutil/logging.cc
+++ b/src/libutil/logging.cc
@@ -198,7 +198,7 @@ bool handleJSONLogMessage(const std::string & msg,
if (action == "start") {
auto type = (ActivityType) json["type"];
- if (trusted || type == actDownload)
+ if (trusted || type == actFileTransfer)
activities.emplace(std::piecewise_construct,
std::forward_as_tuple(json["id"]),
std::forward_as_tuple(*logger, (Verbosity) json["level"], type,
diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh
index beb5e6b64..108b5dcb0 100644
--- a/src/libutil/logging.hh
+++ b/src/libutil/logging.hh
@@ -17,7 +17,7 @@ typedef enum {
typedef enum {
actUnknown = 0,
actCopyPath = 100,
- actDownload = 101,
+ actFileTransfer = 101,
actRealise = 102,
actCopyPaths = 103,
actBuilds = 104,
diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc
index 2a9defb4e..abd390414 100755
--- a/src/nix-channel/nix-channel.cc
+++ b/src/nix-channel/nix-channel.cc
@@ -1,6 +1,6 @@
#include "shared.hh"
#include "globals.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "store-api.hh"
#include "../nix/legacy.hh"
#include "fetchers.hh"
@@ -113,7 +113,7 @@ static void update(const StringSet & channelNames)
// Download the channel tarball.
try {
filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.xz", "nixexprs.tar.xz", false).storePath);
- } catch (DownloadError & e) {
+ } catch (FileTransferError & e) {
filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2", false).storePath);
}
}
diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc
index 2b9254659..748554b9c 100644
--- a/src/nix-prefetch-url/nix-prefetch-url.cc
+++ b/src/nix-prefetch-url/nix-prefetch-url.cc
@@ -1,6 +1,6 @@
#include "hash.hh"
#include "shared.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "store-api.hh"
#include "eval.hh"
#include "eval-inline.hh"
@@ -180,9 +180,9 @@ static int _main(int argc, char * * argv)
FdSink sink(fd.get());
- DownloadRequest req(actualUri);
+ FileTransferRequest req(actualUri);
req.decompress = false;
- getDownloader()->download(std::move(req), sink);
+ getFileTransfer()->download(std::move(req), sink);
}
/* Optionally unpack the file. */
diff --git a/src/nix/main.cc b/src/nix/main.cc
index c5ca089ee..15f412dae 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -8,7 +8,7 @@
#include "shared.hh"
#include "store-api.hh"
#include "progress-bar.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "finally.hh"
#include <sys/types.h>
@@ -177,10 +177,10 @@ void mainWrapped(int argc, char * * argv)
settings.useSubstitutes = false;
if (!settings.tarballTtl.overriden)
settings.tarballTtl = std::numeric_limits<unsigned int>::max();
- if (!downloadSettings.tries.overriden)
- downloadSettings.tries = 0;
- if (!downloadSettings.connectTimeout.overriden)
- downloadSettings.connectTimeout = 1;
+ if (!fileTransferSettings.tries.overriden)
+ fileTransferSettings.tries = 0;
+ if (!fileTransferSettings.connectTimeout.overriden)
+ fileTransferSettings.connectTimeout = 1;
}
if (args.refresh)
diff --git a/src/nix/progress-bar.cc b/src/nix/progress-bar.cc
index 26631416c..adc9b9a5d 100644
--- a/src/nix/progress-bar.cc
+++ b/src/nix/progress-bar.cc
@@ -190,8 +190,8 @@ public:
i->s = fmt("querying " ANSI_BOLD "%s" ANSI_NORMAL " on %s", name, getS(fields, 1));
}
- if ((type == actDownload && hasAncestor(*state, actCopyPath, parent))
- || (type == actDownload && hasAncestor(*state, actQueryPathInfo, parent))
+ if ((type == actFileTransfer && hasAncestor(*state, actCopyPath, parent))
+ || (type == actFileTransfer && hasAncestor(*state, actQueryPathInfo, parent))
|| (type == actCopyPath && hasAncestor(*state, actSubstitute, parent)))
i->visible = false;
@@ -416,7 +416,7 @@ public:
if (!s2.empty()) { res += " ("; res += s2; res += ')'; }
}
- showActivity(actDownload, "%s MiB DL", "%.1f", MiB);
+ showActivity(actFileTransfer, "%s MiB DL", "%.1f", MiB);
{
auto s = renderActivity(actOptimiseStore, "%s paths optimised");
diff --git a/src/nix/run.cc b/src/nix/run.cc
index 7fd9c4c7e..901b87fbb 100644
--- a/src/nix/run.cc
+++ b/src/nix/run.cc
@@ -59,14 +59,14 @@ struct RunCommon : virtual Command
struct CmdRun : InstallablesCommand, RunCommon, MixEnvironment
{
- std::vector<std::string> command = { "bash" };
+ std::vector<std::string> command = { getEnv("SHELL").value_or("bash") };
CmdRun()
{
mkFlag()
.longName("command")
.shortName('c')
- .description("command and arguments to be executed; defaults to 'bash'")
+ .description("command and arguments to be executed; defaults to '$SHELL'")
.labels({"command", "args"})
.arity(ArityAny)
.handler([&](std::vector<std::string> ss) {
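With this change nix run starts the user's own shell instead of hard-coding bash. A self-contained sketch of the fallback, where getEnvVar stands in for nix's getEnv() helper (which, as the hunk shows, likewise returns an optional):

#include <cstdlib>
#include <iostream>
#include <optional>
#include <string>

// Stand-in for nix's getEnv(): wrap getenv() in std::optional.
std::optional<std::string> getEnvVar(const char * name)
{
    const char * value = std::getenv(name);
    if (!value) return std::nullopt;
    return std::string(value);
}

int main()
{
    // Same fallback as CmdRun above: prefer $SHELL, otherwise use bash.
    std::string command = getEnvVar("SHELL").value_or("bash");
    std::cout << "would start: " << command << "\n";
    return 0;
}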
diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc
index c05c29517..4fcc6a721 100644
--- a/src/nix/upgrade-nix.cc
+++ b/src/nix/upgrade-nix.cc
@@ -1,7 +1,7 @@
#include "command.hh"
#include "common-args.hh"
#include "store-api.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "eval.hh"
#include "attr-path.hh"
#include "names.hh"
@@ -138,8 +138,8 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
Activity act(*logger, lvlInfo, actUnknown, "querying latest Nix version");
// FIXME: use nixos.org?
- auto req = DownloadRequest(storePathsUrl);
- auto res = getDownloader()->download(req);
+ auto req = FileTransferRequest(storePathsUrl);
+ auto res = getFileTransfer()->download(req);
auto state = std::make_unique<EvalState>(Strings(), store);
auto v = state->allocValue();