author    | Eelco Dolstra <edolstra@gmail.com> | 2019-05-22 23:36:29 +0200
committer | Eelco Dolstra <edolstra@gmail.com> | 2019-05-22 23:36:29 +0200
commit    | df3f5a78d5ab0a1f2dc9d288b271b38a9b8b33b5 (patch)
tree      | 8524ea993ec051f12b879e6b9a10963cfb19b812 /src
parent    | 66f1d7ee95ba693a15ae5dc413289fee954f0f04 (diff)
Refactor downloadCached() interface
Diffstat (limited to 'src')
-rw-r--r-- | src/libexpr/common-eval-args.cc  |  8
-rw-r--r-- | src/libexpr/parser.y             |  4
-rw-r--r-- | src/libexpr/primops.cc           | 22
-rw-r--r-- | src/libexpr/primops/flake.cc     | 15
-rw-r--r-- | src/libstore/download.cc         | 44
-rw-r--r-- | src/libstore/download.hh         | 20
-rwxr-xr-x | src/nix-channel/nix-channel.cc   | 14
7 files changed, 73 insertions, 54 deletions
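
In short, the positional-parameter overload of downloadCached() is replaced by one that takes a CachedDownloadRequest, and the effective (post-redirect) URI is now returned in CachedDownloadResult instead of through a string out-parameter. A minimal sketch of the new call shape, using only the names visible in the diff below (the URL, the surrounding function and the includes are illustrative assumptions, not part of the change):

    #include "download.hh"   // CachedDownloadRequest, CachedDownloadResult, getDownloader()
    #include "store-api.hh"  // ref<Store>

    using namespace nix;

    // Hypothetical caller: fetch and unpack a tarball via the download cache.
    Path fetchExample(ref<Store> store)
    {
        // Old style: downloadCached(store, url, unpack, name, expectedHash, &effectiveUrl, ttl)
        CachedDownloadRequest request("https://example.org/nixexprs.tar.xz"); // placeholder URL
        request.unpack = true;    // unpack the tarball into the store
        request.name = "source";  // store path name; derived from the URL when left empty
        auto result = getDownloader()->downloadCached(store, request);
        // result.effectiveUri replaces the old 'string * effectiveUrl' out-parameter
        return result.path;
    }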
diff --git a/src/libexpr/common-eval-args.cc b/src/libexpr/common-eval-args.cc
index 8e94d358e..7c0d268bd 100644
--- a/src/libexpr/common-eval-args.cc
+++ b/src/libexpr/common-eval-args.cc
@@ -61,9 +61,11 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
 
 Path lookupFileArg(EvalState & state, string s)
 {
-    if (isUri(s))
-        return getDownloader()->downloadCached(state.store, s, true).path;
-    else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
+    if (isUri(s)) {
+        CachedDownloadRequest request(s);
+        request.unpack = true;
+        return getDownloader()->downloadCached(state.store, request).path;
+    } else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
         Path p = s.substr(1, s.size() - 2);
         return state.findFile(p);
     } else
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index 75f04df3e..0cfe29c96 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -657,7 +657,9 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
 
     if (isUri(elem.second)) {
         try {
-            res = { true, getDownloader()->downloadCached(store, elem.second, true).path };
+            CachedDownloadRequest request(elem.second);
+            request.unpack = true;
+            res = { true, getDownloader()->downloadCached(store, request).path };
         } catch (DownloadError & e) {
             printError(format("warning: Nix search path entry '%1%' cannot be downloaded, ignoring") % elem.second);
             res = { false, "" };
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 55a1bde11..070e72f3a 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -2050,9 +2050,9 @@ static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args
 void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
     const string & who, bool unpack, const std::string & defaultName)
 {
-    string url;
-    Hash expectedHash;
-    string name = defaultName;
+    CachedDownloadRequest request("");
+    request.unpack = unpack;
+    request.name = defaultName;
 
     state.forceValue(*args[0]);
 
@@ -2063,27 +2063,27 @@ void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
         for (auto & attr : *args[0]->attrs) {
             string n(attr.name);
             if (n == "url")
-                url = state.forceStringNoCtx(*attr.value, *attr.pos);
+                request.uri = state.forceStringNoCtx(*attr.value, *attr.pos);
             else if (n == "sha256")
-                expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
+                request.expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
             else if (n == "name")
-                name = state.forceStringNoCtx(*attr.value, *attr.pos);
+                request.name = state.forceStringNoCtx(*attr.value, *attr.pos);
             else
                 throw EvalError(format("unsupported argument '%1%' to '%2%', at %3%") % attr.name % who % attr.pos);
         }
 
-        if (url.empty())
+        if (request.uri.empty())
             throw EvalError(format("'url' argument required, at %1%") % pos);
 
     } else
-        url = state.forceStringNoCtx(*args[0], pos);
+        request.uri = state.forceStringNoCtx(*args[0], pos);
 
-    state.checkURI(url);
+    state.checkURI(request.uri);
 
-    if (evalSettings.pureEval && !expectedHash)
+    if (evalSettings.pureEval && !request.expectedHash)
         throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who);
 
-    Path res = getDownloader()->downloadCached(state.store, url, unpack, name, expectedHash).path;
+    Path res = getDownloader()->downloadCached(state.store, request).path;
 
     if (state.allowedPaths)
         state.allowedPaths->insert(res);
diff --git a/src/libexpr/primops/flake.cc b/src/libexpr/primops/flake.cc
index c6c380118..e2fdf08ca 100644
--- a/src/libexpr/primops/flake.cc
+++ b/src/libexpr/primops/flake.cc
@@ -136,9 +136,11 @@ std::shared_ptr<FlakeRegistry> EvalState::getGlobalFlakeRegistry()
     std::call_once(_globalFlakeRegistryInit, [&]() {
         auto path = evalSettings.flakeRegistry;
 
-        if (!hasPrefix(path, "/"))
-            path = getDownloader()->downloadCached(store,
-                evalSettings.flakeRegistry, false, "registry").path;
+        if (!hasPrefix(path, "/")) {
+            CachedDownloadRequest request(evalSettings.flakeRegistry);
+            request.name = "flake-registry.json";
+            path = getDownloader()->downloadCached(store, request).path;
+        }
 
         _globalFlakeRegistry = readRegistry(path);
     });
@@ -244,8 +246,11 @@ static SourceInfo fetchFlake(EvalState & state, const FlakeRef & flakeRef, bool
         if (accessToken != "")
            url += "?access_token=" + accessToken;
 
-        auto result = getDownloader()->downloadCached(state.store, url, true, "source",
-            Hash(), nullptr, resolvedRef.rev ? 1000000000 : settings.tarballTtl);
+        CachedDownloadRequest request(url);
+        request.unpack = true;
+        request.name = "source";
+        request.ttl = resolvedRef.rev ? 1000000000 : settings.tarballTtl;
+        auto result = getDownloader()->downloadCached(state.store, request);
 
         if (!result.etag)
             throw Error("did not receive an ETag header from '%s'", url);
diff --git a/src/libstore/download.cc b/src/libstore/download.cc
index 975cfd97d..a7c2600f6 100644
--- a/src/libstore/download.cc
+++ b/src/libstore/download.cc
@@ -319,10 +319,10 @@ struct CurlDownloader : public Downloader
             long httpStatus = 0;
             curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
 
-            char * effectiveUrlCStr;
-            curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUrlCStr);
-            if (effectiveUrlCStr)
-                result.effectiveUrl = effectiveUrlCStr;
+            char * effectiveUriCStr;
+            curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUriCStr);
+            if (effectiveUriCStr)
+                result.effectiveUri = effectiveUriCStr;
 
             debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes",
                 request.verb(), request.uri, code, httpStatus, result.bodySize);
@@ -790,18 +790,20 @@ void Downloader::download(DownloadRequest && request, Sink & sink)
     }
 }
 
-CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string & url_, bool unpack, string name, const Hash & expectedHash, string * effectiveUrl, int ttl)
+CachedDownloadResult Downloader::downloadCached(
+    ref<Store> store, const CachedDownloadRequest & request)
 {
-    auto url = resolveUri(url_);
+    auto url = resolveUri(request.uri);
+    auto name = request.name;
     if (name == "") {
         auto p = url.rfind('/');
         if (p != string::npos) name = string(url, p + 1);
     }
 
     Path expectedStorePath;
-    if (expectedHash) {
-        expectedStorePath = store->makeFixedOutputPath(unpack, expectedHash, name);
+    if (request.expectedHash) {
+        expectedStorePath = store->makeFixedOutputPath(request.unpack, request.expectedHash, name);
         if (store->isValidPath(expectedStorePath)) {
             CachedDownloadResult result;
             result.storePath = expectedStorePath;
@@ -835,10 +837,9 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
             auto ss = tokenizeString<vector<string>>(readFile(dataFile), "\n");
             if (ss.size() >= 3 && ss[0] == url) {
                 time_t lastChecked;
-                if (string2Int(ss[2], lastChecked) && lastChecked + ttl >= time(0)) {
+                if (string2Int(ss[2], lastChecked) && lastChecked + request.ttl >= time(0)) {
                     skip = true;
-                    if (effectiveUrl)
-                        *effectiveUrl = url_;
+                    result.effectiveUri = request.uri;
                     result.etag = ss[1];
                 } else if (!ss[1].empty()) {
                     debug(format("verifying previous ETag '%1%'") % ss[1]);
@@ -852,18 +853,17 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
     if (!skip) {
 
         try {
-            DownloadRequest request(url);
-            request.expectedETag = expectedETag;
-            auto res = download(request);
-            if (effectiveUrl)
-                *effectiveUrl = res.effectiveUrl;
+            DownloadRequest request2(url);
+            request2.expectedETag = expectedETag;
+            auto res = download(request2);
+            result.effectiveUri = res.effectiveUri;
             result.etag = res.etag;
 
             if (!res.cached) {
                 ValidPathInfo info;
                 StringSink sink;
                 dumpString(*res.data, sink);
-                Hash hash = hashString(expectedHash ? expectedHash.type : htSHA256, *res.data);
+                Hash hash = hashString(request.expectedHash ? request.expectedHash.type : htSHA256, *res.data);
                 info.path = store->makeFixedOutputPath(false, hash, name);
                 info.narHash = hashString(htSHA256, *sink.s);
                 info.narSize = sink.s->size();
@@ -883,7 +883,7 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
         }
     }
 
-    if (unpack) {
+    if (request.unpack) {
         Path unpackedLink = cacheDir + "/" + baseNameOf(storePath) + "-unpacked";
         PathLocks lock2({unpackedLink}, fmt("waiting for lock on '%1%'...", unpackedLink));
         Path unpackedStorePath;
@@ -906,11 +906,11 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
     }
 
     if (expectedStorePath != "" && storePath != expectedStorePath) {
-        Hash gotHash = unpack
-            ? hashPath(expectedHash.type, store->toRealPath(storePath)).first
-            : hashFile(expectedHash.type, store->toRealPath(storePath));
+        Hash gotHash = request.unpack
+            ? hashPath(request.expectedHash.type, store->toRealPath(storePath)).first
+            : hashFile(request.expectedHash.type, store->toRealPath(storePath));
         throw nix::Error("hash mismatch in file downloaded from '%s':\n  wanted: %s\n  got: %s",
-            url, expectedHash.to_string(), gotHash.to_string());
+            url, request.expectedHash.to_string(), gotHash.to_string());
     }
 
     result.storePath = storePath;
diff --git a/src/libstore/download.hh b/src/libstore/download.hh
index aa8c34be2..b676a1a7b 100644
--- a/src/libstore/download.hh
+++ b/src/libstore/download.hh
@@ -36,11 +36,23 @@ struct DownloadResult
 {
     bool cached = false;
     std::string etag;
-    std::string effectiveUrl;
+    std::string effectiveUri;
     std::shared_ptr<std::string> data;
     uint64_t bodySize = 0;
 };
 
+struct CachedDownloadRequest
+{
+    std::string uri;
+    bool unpack = false;
+    std::string name;
+    Hash expectedHash;
+    unsigned int ttl = settings.tarballTtl;
+
+    CachedDownloadRequest(const std::string & uri)
+        : uri(uri) { }
+};
+
 struct CachedDownloadResult
 {
     // Note: 'storePath' may be different from 'path' when using a
@@ -48,6 +60,7 @@ struct CachedDownloadResult
     Path storePath;
     Path path;
     std::optional<std::string> etag;
+    std::string effectiveUri;
 };
 
 class Store;
@@ -73,10 +86,7 @@ struct Downloader
       and is more recent than ‘tarball-ttl’ seconds. Otherwise,
      use the recorded ETag to verify if the server has a more
      recent version, and if so, download it to the Nix store. */
-    CachedDownloadResult downloadCached(
-        ref<Store> store, const string & uri, bool unpack, string name = "",
-        const Hash & expectedHash = Hash(), string * effectiveUri = nullptr,
-        int ttl = settings.tarballTtl);
+    CachedDownloadResult downloadCached(ref<Store> store, const CachedDownloadRequest & request);
 
     enum Error { NotFound, Forbidden, Misc, Transient, Interrupted };
 };
diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc
index 7b23088a2..bd1371dba 100755
--- a/src/nix-channel/nix-channel.cc
+++ b/src/nix-channel/nix-channel.cc
@@ -86,10 +86,12 @@ static void update(const StringSet & channelNames)
         // We want to download the url to a file to see if it's a tarball while also checking if we
        // got redirected in the process, so that we can grab the various parts of a nix channel
        // definition from a consistent location if the redirect changes mid-download.
-        std::string effectiveUrl;
+        CachedDownloadRequest request(url);
+        request.ttl = 0;
         auto dl = getDownloader();
-        auto filename = dl->downloadCached(store, url, false, "", Hash(), &effectiveUrl, 0).path;
-        url = chomp(std::move(effectiveUrl));
+        auto result = dl->downloadCached(store, request);
+        auto filename = result.path;
+        url = chomp(result.effectiveUri);
 
         // If the URL contains a version number, append it to the name
         // attribute (so that "nix-env -q" on the channels profile
@@ -121,12 +123,10 @@ static void update(const StringSet & channelNames)
         }
 
         // Download the channel tarball.
-        auto fullURL = url + "/nixexprs.tar.xz";
         try {
-            filename = dl->downloadCached(store, fullURL, false).path;
+            filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.xz")).path;
         } catch (DownloadError & e) {
-            fullURL = url + "/nixexprs.tar.bz2";
-            filename = dl->downloadCached(store, fullURL, false).path;
+            filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.bz2")).path;
         }
         chomp(filename);
     }
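
Beyond the call sites above, the request object makes the optional knobs explicit instead of a trail of defaulted arguments. For example, a caller that wants flake-style behaviour, a pinned hash plus an effectively infinite TTL, might look roughly like this (an illustrative sketch: 'url' and 'sha256' are assumed to be std::string variables, and the TTL value mirrors the one used in flake.cc above):

    CachedDownloadRequest request(url);
    request.unpack = true;
    request.name = "source";
    // A known hash lets downloadCached() short-circuit the network entirely
    // when the corresponding fixed-output store path is already valid.
    request.expectedHash = Hash(sha256, htSHA256);
    // A very large TTL keeps the cached tarball without revalidating the ETag.
    request.ttl = 1000000000;
    auto result = getDownloader()->downloadCached(store, request);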