author    Eelco Dolstra <eelco.dolstra@logicblox.com>    2015-07-20 04:30:16 +0200
committer Eelco Dolstra <eelco.dolstra@logicblox.com>    2015-07-20 04:38:46 +0200
commit    0a2bee307b20411f5b0dda0c662b1f9bb9e0e131 (patch)
tree      e0449738da384147f851a2343f8d21ae7591a3e0 /src/libstore
parent    eda2f36c2ac847e02e871c327e7633693d92cd8d (diff)
Make <nix/fetchurl.nix> a builtin builder
This ensures that 1) the derivation doesn't change when Nix changes; 2) the derivation closure doesn't contain Nix and its dependencies; 3) we don't have to rely on ugly chroot hacks.
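For readers skimming the diff below, here is a minimal, self-contained C++ sketch (not part of the patch) of the dispatch pattern this commit introduces in build.cc: a builder string starting with "builtin:" selects an in-process function instead of an external executable. Only the "builtin:" prefix test and the stripped name mirror the patch; the builtins table, main() and the messages are illustrative assumptions.

#include <functional>
#include <iostream>
#include <map>
#include <string>

// Illustrative registry; the real patch only handles "builtin:fetchurl".
static const std::map<std::string, std::function<void()>> builtins = {
    { "fetchurl", [] { std::cout << "download performed inside the Nix process\n"; } },
};

int main()
{
    std::string builder = "builtin:fetchurl";        // stands in for drv.builder
    if (builder.compare(0, 8, "builtin:") == 0) {    // same test as isBuiltin() below
        auto it = builtins.find(builder.substr(8));  // strip the "builtin:" prefix
        if (it == builtins.end()) {
            std::cerr << "unsupported builtin function '" << builder.substr(8) << "'\n";
            return 1;
        }
        it->second();                                // run in-process, no execve()
        return 0;
    }
    std::cout << "would execve() " << builder << " as an ordinary builder\n";
    return 0;
}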
Diffstat (limited to 'src/libstore')
-rw-r--r--  src/libstore/build.cc    |  28
-rw-r--r--  src/libstore/builtins.cc |  24
-rw-r--r--  src/libstore/builtins.hh |   9
-rw-r--r--  src/libstore/download.cc | 236
-rw-r--r--  src/libstore/download.hh |  22
-rw-r--r--  src/libstore/local.mk    |   2
6 files changed, 317 insertions(+), 4 deletions(-)
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index 61677d1eb..46d2841dc 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -8,6 +8,7 @@
#include "util.hh"
#include "archive.hh"
#include "affinity.hh"
+#include "builtins.hh"
#include <map>
#include <sstream>
@@ -1269,6 +1270,12 @@ bool substitutesAllowed(const BasicDerivation & drv)
}
+static bool isBuiltin(const BasicDerivation & drv)
+{
+ return string(drv.builder, 0, 8) == "builtin:";
+}
+
+
void DerivationGoal::tryToBuild()
{
trace("trying to build");
@@ -2139,7 +2146,7 @@ void DerivationGoal::startBuilder()
#endif
{
ProcessOptions options;
- options.allowVfork = !buildUser.enabled();
+ options.allowVfork = !buildUser.enabled() && !isBuiltin(*drv);
pid = startProcess([&]() {
runChild();
}, options);
@@ -2386,7 +2393,9 @@ void DerivationGoal::runChild()
const char *builder = "invalid";
string sandboxProfile;
- if (useChroot && SANDBOX_ENABLED) {
+ if (isBuiltin(*drv))
+ ;
+ else if (useChroot && SANDBOX_ENABLED) {
/* Lots and lots and lots of file functions freak out if they can't stat their full ancestry */
PathSet ancestry;
@@ -2413,7 +2422,6 @@ void DerivationGoal::runChild()
for (auto & i : inputPaths)
dirsInChroot[i] = i;
-
/* TODO: we should factor out the policy cleanly, so we don't have to repeat the constants every time... */
sandboxProfile += "(version 1)\n";
@@ -2517,6 +2525,20 @@ void DerivationGoal::runChild()
}
/* Execute the program. This should not return. */
+ if (isBuiltin(*drv)) {
+ try {
+ logType = ltFlat;
+ if (drv->builder == "builtin:fetchurl")
+ builtinFetchurl(*drv);
+ else
+ throw Error(format("unsupported builtin function ‘%1%’") % string(drv->builder, 8));
+ _exit(0);
+ } catch (std::exception & e) {
+ writeFull(STDERR_FILENO, "error: " + string(e.what()) + "\n");
+ _exit(1);
+ }
+ }
+
execve(builder, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
throw SysError(format("executing ‘%1%’") % drv->builder);
diff --git a/src/libstore/builtins.cc b/src/libstore/builtins.cc
new file mode 100644
index 000000000..97d6cb943
--- /dev/null
+++ b/src/libstore/builtins.cc
@@ -0,0 +1,24 @@
+#include "builtins.hh"
+#include "download.hh"
+
+namespace nix {
+
+void builtinFetchurl(const BasicDerivation & drv)
+{
+ auto url = drv.env.find("url");
+ if (url == drv.env.end()) throw Error("attribute ‘url’ missing");
+ printMsg(lvlInfo, format("downloading ‘%1%’...") % url->second);
+ auto data = downloadFile(url->second); // FIXME: show progress
+
+ auto out = drv.env.find("out");
+ if (out == drv.env.end()) throw Error("attribute ‘out’ missing");
+ writeFile(out->second, data.data);
+
+ auto executable = drv.env.find("executable");
+ if (executable != drv.env.end() && executable->second == "1") {
+ if (chmod(out->second.c_str(), 0755) == -1)
+ throw SysError(format("making ‘%1%’ executable") % out->second);
+ }
+}
+
+}
diff --git a/src/libstore/builtins.hh b/src/libstore/builtins.hh
new file mode 100644
index 000000000..4b2431aa0
--- /dev/null
+++ b/src/libstore/builtins.hh
@@ -0,0 +1,9 @@
+#pragma once
+
+#include "derivations.hh"
+
+namespace nix {
+
+void builtinFetchurl(const BasicDerivation & drv);
+
+}
diff --git a/src/libstore/download.cc b/src/libstore/download.cc
new file mode 100644
index 000000000..9bf3e13aa
--- /dev/null
+++ b/src/libstore/download.cc
@@ -0,0 +1,236 @@
+#include "download.hh"
+#include "util.hh"
+#include "globals.hh"
+#include "hash.hh"
+#include "store-api.hh"
+
+#include <curl/curl.h>
+
+namespace nix {
+
+struct Curl
+{
+ CURL * curl;
+ string data;
+ string etag, status, expectedETag;
+
+ struct curl_slist * requestHeaders;
+
+ static size_t writeCallback(void * contents, size_t size, size_t nmemb, void * userp)
+ {
+ Curl & c(* (Curl *) userp);
+ size_t realSize = size * nmemb;
+ c.data.append((char *) contents, realSize);
+ return realSize;
+ }
+
+ static size_t headerCallback(void * contents, size_t size, size_t nmemb, void * userp)
+ {
+ Curl & c(* (Curl *) userp);
+ size_t realSize = size * nmemb;
+ string line = string((char *) contents, realSize);
+ printMsg(lvlVomit, format("got header: %1%") % trim(line));
+ if (line.compare(0, 5, "HTTP/") == 0) { // new response starts
+ c.etag = "";
+ auto ss = tokenizeString<vector<string>>(line, " ");
+ c.status = ss.size() >= 2 ? ss[1] : "";
+ } else {
+ auto i = line.find(':');
+ if (i != string::npos) {
+ string name = trim(string(line, 0, i));
+ if (name == "ETag") { // FIXME: case
+ c.etag = trim(string(line, i + 1));
+ /* Hack to work around a GitHub bug: it sends
+ ETags, but ignores If-None-Match. So if we get
+ the expected ETag on a 200 response, then shut
+ down the connection because we already have the
+ data. */
+ printMsg(lvlDebug, format("got ETag: %1%") % c.etag);
+ if (c.etag == c.expectedETag && c.status == "200") {
+ printMsg(lvlDebug, format("shutting down on 200 HTTP response with expected ETag"));
+ return 0;
+ }
+ }
+ }
+ }
+ return realSize;
+ }
+
+ static int progressCallback(void * clientp, double dltotal, double dlnow, double ultotal, double ulnow)
+ {
+ return _isInterrupted;
+ }
+
+ Curl()
+ {
+ requestHeaders = 0;
+
+ curl = curl_easy_init();
+ if (!curl) throw Error("unable to initialize curl");
+
+ curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
+ curl_easy_setopt(curl, CURLOPT_CAINFO, getEnv("SSL_CERT_FILE", "/etc/ssl/certs/ca-certificates.crt").c_str());
+ curl_easy_setopt(curl, CURLOPT_USERAGENT, ("Nix/" + nixVersion).c_str());
+ curl_easy_setopt(curl, CURLOPT_FAILONERROR, 1);
+
+ curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, writeCallback);
+ curl_easy_setopt(curl, CURLOPT_WRITEDATA, (void *) &curl);
+
+ curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, headerCallback);
+ curl_easy_setopt(curl, CURLOPT_HEADERDATA, (void *) &curl);
+
+ curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progressCallback);
+ curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0);
+ }
+
+ ~Curl()
+ {
+ if (curl) curl_easy_cleanup(curl);
+ if (requestHeaders) curl_slist_free_all(requestHeaders);
+ }
+
+ bool fetch(const string & url, const string & expectedETag = "")
+ {
+ curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
+
+ data.clear();
+
+ if (requestHeaders) {
+ curl_slist_free_all(requestHeaders);
+ requestHeaders = 0;
+ }
+
+ if (!expectedETag.empty()) {
+ this->expectedETag = expectedETag;
+ requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + expectedETag).c_str());
+ }
+
+ curl_easy_setopt(curl, CURLOPT_HTTPHEADER, requestHeaders);
+
+ CURLcode res = curl_easy_perform(curl);
+ checkInterrupt();
+ if (res == CURLE_WRITE_ERROR && etag == expectedETag) return false;
+ if (res != CURLE_OK)
+ throw DownloadError(format("unable to download ‘%1%’: %2% (%3%)")
+ % url % curl_easy_strerror(res) % res);
+
+ long httpStatus = 0;
+ curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &httpStatus);
+ if (httpStatus == 304) return false;
+
+ return true;
+ }
+};
+
+
+DownloadResult downloadFile(string url, string expectedETag)
+{
+ DownloadResult res;
+ Curl curl;
+ if (curl.fetch(url, expectedETag)) {
+ res.cached = false;
+ res.data = curl.data;
+ } else
+ res.cached = true;
+ res.etag = curl.etag;
+ return res;
+}
+
+
+Path downloadFileCached(const string & url, bool unpack)
+{
+ Path cacheDir = getEnv("XDG_CACHE_HOME", getEnv("HOME", "") + "/.cache") + "/nix/tarballs";
+ createDirs(cacheDir);
+
+ string urlHash = printHash32(hashString(htSHA256, url));
+
+ Path dataFile = cacheDir + "/" + urlHash + ".info";
+ Path fileLink = cacheDir + "/" + urlHash + "-file";
+
+ Path storePath;
+
+ string expectedETag;
+
+ int ttl = settings.get("tarball-ttl", 60 * 60);
+ bool skip = false;
+
+ if (pathExists(fileLink) && pathExists(dataFile)) {
+ storePath = readLink(fileLink);
+ store->addTempRoot(storePath);
+ if (store->isValidPath(storePath)) {
+ auto ss = tokenizeString<vector<string>>(readFile(dataFile), "\n");
+ if (ss.size() >= 3 && ss[0] == url) {
+ time_t lastChecked;
+ if (string2Int(ss[2], lastChecked) && lastChecked + ttl >= time(0))
+ skip = true;
+ else if (!ss[1].empty()) {
+ printMsg(lvlDebug, format("verifying previous ETag ‘%1%’") % ss[1]);
+ expectedETag = ss[1];
+ }
+ }
+ } else
+ storePath = "";
+ }
+
+ string name;
+ auto p = url.rfind('/');
+ if (p != string::npos) name = string(url, p + 1);
+
+ if (!skip) {
+
+ if (storePath.empty())
+ printMsg(lvlInfo, format("downloading ‘%1%’...") % url);
+ else
+ printMsg(lvlInfo, format("checking ‘%1%’...") % url);
+
+ try {
+ auto res = downloadFile(url, expectedETag);
+
+ if (!res.cached)
+ storePath = store->addTextToStore(name, res.data, PathSet(), false);
+
+ assert(!storePath.empty());
+ replaceSymlink(storePath, fileLink);
+
+ writeFile(dataFile, url + "\n" + res.etag + "\n" + int2String(time(0)) + "\n");
+ } catch (DownloadError & e) {
+ if (storePath.empty()) throw;
+ printMsg(lvlError, format("warning: %1%; using cached result") % e.msg());
+ }
+ }
+
+ if (unpack) {
+ Path unpackedLink = cacheDir + "/" + baseNameOf(storePath) + "-unpacked";
+ Path unpackedStorePath;
+ if (pathExists(unpackedLink)) {
+ unpackedStorePath = readLink(unpackedLink);
+ store->addTempRoot(unpackedStorePath);
+ if (!store->isValidPath(unpackedStorePath))
+ unpackedStorePath = "";
+ }
+ if (unpackedStorePath.empty()) {
+ printMsg(lvlInfo, format("unpacking ‘%1%’...") % url);
+ Path tmpDir = createTempDir();
+ AutoDelete autoDelete(tmpDir, true);
+ // FIXME: this requires GNU tar for decompression.
+ runProgram("tar", true, {"xf", storePath, "-C", tmpDir, "--strip-components", "1"}, "");
+ unpackedStorePath = store->addToStore(name, tmpDir, true, htSHA256, defaultPathFilter, false);
+ }
+ replaceSymlink(unpackedStorePath, unpackedLink);
+ return unpackedStorePath;
+ }
+
+ return storePath;
+}
+
+
+bool isUri(const string & s)
+{
+ size_t pos = s.find("://");
+ if (pos == string::npos) return false;
+ string scheme(s, 0, pos);
+ return scheme == "http" || scheme == "https" || scheme == "file";
+}
+
+
+}
diff --git a/src/libstore/download.hh b/src/libstore/download.hh
new file mode 100644
index 000000000..28c9117e4
--- /dev/null
+++ b/src/libstore/download.hh
@@ -0,0 +1,22 @@
+#pragma once
+
+#include "types.hh"
+#include <string>
+
+namespace nix {
+
+struct DownloadResult
+{
+ bool cached;
+ string data, etag;
+};
+
+DownloadResult downloadFile(string url, string expectedETag = "");
+
+Path downloadFileCached(const string & url, bool unpack);
+
+MakeError(DownloadError, Error)
+
+bool isUri(const string & s);
+
+}
diff --git a/src/libstore/local.mk b/src/libstore/local.mk
index 771c06753..bf5c256c9 100644
--- a/src/libstore/local.mk
+++ b/src/libstore/local.mk
@@ -8,7 +8,7 @@ libstore_SOURCES := $(wildcard $(d)/*.cc)
libstore_LIBS = libutil libformat
-libstore_LDFLAGS = -lsqlite3 -lbz2
+libstore_LDFLAGS = -lsqlite3 -lbz2 -lcurl
ifeq ($(OS), SunOS)
libstore_LDFLAGS += -lsocket
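To round off, a hedged sketch of how the API declared in the new download.hh might be exercised by a caller inside libstore. It assumes the usual libstore initialization (settings, the global store) has already happened; the function name downloadExample and the example URL are illustrative, while downloadFile, downloadFileCached, printMsg and format come from the code above.

#include "download.hh"
#include "util.hh"

namespace nix {

/* Illustrative caller, not part of the commit. */
void downloadExample()
{
    /* One-shot download; a non-empty expectedETag enables If-None-Match
       revalidation, and res.cached reports whether a 304 was returned. */
    DownloadResult res = downloadFile("http://example.org/foo.tar.gz");
    if (!res.cached)
        printMsg(lvlInfo, format("fetched %1% bytes, ETag %2%")
            % res.data.size() % res.etag);

    /* Download into the Nix store, reusing the ~/.cache/nix/tarballs entry
       while it is younger than the ‘tarball-ttl’ setting, then unpack. */
    Path unpacked = downloadFileCached("http://example.org/foo.tar.gz", true);
    printMsg(lvlInfo, format("unpacked to %1%") % unpacked);
}

}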