Diffstat (limited to 'src/libstore')
-rw-r--r--  src/libstore/build.cc                    |  19
-rw-r--r--  src/libstore/builtins.hh                 |   1
-rw-r--r--  src/libstore/builtins/buildenv.cc        | 120
-rw-r--r--  src/libstore/builtins/buildenv.hh        |  21
-rw-r--r--  src/libstore/derivations.cc              |   2
-rw-r--r--  src/libstore/download.cc                 |  19
-rw-r--r--  src/libstore/download.hh                 |   3
-rw-r--r--  src/libstore/fetchers/fetchers.cc        | 132
-rw-r--r--  src/libstore/fetchers/fetchers.hh        | 109
-rw-r--r--  src/libstore/fetchers/git.cc             | 443
-rw-r--r--  src/libstore/fetchers/github.cc          | 212
-rw-r--r--  src/libstore/fetchers/indirect.cc        | 142
-rw-r--r--  src/libstore/fetchers/mercurial.cc       | 331
-rw-r--r--  src/libstore/fetchers/parse.cc           | 138
-rw-r--r--  src/libstore/fetchers/parse.hh           |  30
-rw-r--r--  src/libstore/fetchers/regex.hh           |  37
-rw-r--r--  src/libstore/fetchers/registry.cc        | 167
-rw-r--r--  src/libstore/fetchers/registry.hh        |  55
-rw-r--r--  src/libstore/fetchers/tarball.cc         | 133
-rw-r--r--  src/libstore/fetchers/tree-info.hh       |  26
-rw-r--r--  src/libstore/globals.hh                  |  12
-rw-r--r--  src/libstore/http-binary-cache-store.cc  |   5
-rw-r--r--  src/libstore/local-store.cc              |  11
-rw-r--r--  src/libstore/local.mk                    |   8
-rw-r--r--  src/libstore/nar-info-disk-cache.cc      |   7
-rw-r--r--  src/libstore/nar-info-disk-cache.hh      |   2
-rw-r--r--  src/libstore/parsed-derivations.cc       |   6
-rw-r--r--  src/libstore/parsed-derivations.hh       |  10
-rw-r--r--  src/libstore/profiles.cc                 |  18
-rw-r--r--  src/libstore/profiles.hh                 |   4
-rw-r--r--  src/libstore/sqlite.cc                   |  25
-rw-r--r--  src/libstore/sqlite.hh                   |  10
-rw-r--r--  src/libstore/store-api.cc                |  32
-rw-r--r--  src/libstore/store-api.hh                |   6
34 files changed, 2158 insertions, 138 deletions
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index 9c6aedfa5..0e3a23a4d 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -6,6 +6,7 @@
#include "archive.hh"
#include "affinity.hh"
#include "builtins.hh"
+#include "builtins/buildenv.hh"
#include "download.hh"
#include "finally.hh"
#include "compression.hh"
@@ -1397,7 +1398,7 @@ void DerivationGoal::tryToBuild()
few seconds and then retry this goal. */
PathSet lockFiles;
for (auto & outPath : drv->outputPaths())
- lockFiles.insert(worker.store.toRealPath(worker.store.printStorePath(outPath)));
+ lockFiles.insert(worker.store.Store::toRealPath(outPath));
if (!outputLocks.lockPaths(lockFiles, "", false)) {
worker.waitForAWhile(shared_from_this());
@@ -1428,7 +1429,7 @@ void DerivationGoal::tryToBuild()
for (auto & i : drv->outputs) {
if (worker.store.isValidPath(i.second.path)) continue;
debug("removing invalid path '%s'", worker.store.printStorePath(i.second.path));
- deletePath(worker.store.toRealPath(worker.store.printStorePath(i.second.path)));
+ deletePath(worker.store.Store::toRealPath(i.second.path));
}
/* Don't do a remote build if the derivation has the attribute
@@ -1685,7 +1686,7 @@ void DerivationGoal::buildDone()
/* Delete unused redirected outputs (when doing hash rewriting). */
for (auto & i : redirectedOutputs)
- deletePath(worker.store.toRealPath(worker.store.printStorePath(i.second)));
+ deletePath(worker.store.Store::toRealPath(i.second));
/* Delete the chroot (if we were using one). */
autoDelChroot.reset(); /* this runs the destructor */
@@ -1904,7 +1905,7 @@ void DerivationGoal::startBuilder()
concatStringsSep(", ", parsedDrv->getRequiredSystemFeatures()),
worker.store.printStorePath(drvPath),
settings.thisSystem,
- concatStringsSep(", ", settings.systemFeatures));
+ concatStringsSep<StringSet>(", ", settings.systemFeatures));
if (drv->isBuiltin())
preloadNSS();
@@ -2071,7 +2072,7 @@ void DerivationGoal::startBuilder()
environment using bind-mounts. We put it in the Nix store
to ensure that we can create hard-links to non-directory
inputs in the fake Nix store in the chroot (see below). */
- chrootRootDir = worker.store.toRealPath(worker.store.printStorePath(drvPath)) + ".chroot";
+ chrootRootDir = worker.store.Store::toRealPath(drvPath) + ".chroot";
deletePath(chrootRootDir);
/* Clean up the chroot directory automatically. */
@@ -2550,7 +2551,7 @@ static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*");
void DerivationGoal::writeStructuredAttrs()
{
- auto & structuredAttrs = parsedDrv->getStructuredAttrs();
+ auto structuredAttrs = parsedDrv->getStructuredAttrs();
if (!structuredAttrs) return;
auto json = *structuredAttrs;
@@ -2916,7 +2917,7 @@ void DerivationGoal::addDependency(const StorePath & path)
#if __linux__
- Path source = worker.store.toRealPath(worker.store.printStorePath(path));
+ Path source = worker.store.Store::toRealPath(path);
Path target = chrootRootDir + worker.store.printStorePath(path);
debug("bind-mounting %s -> %s", target, source);
@@ -3578,7 +3579,7 @@ void DerivationGoal::registerOutputs()
if (needsHashRewrite()) {
auto r = redirectedOutputs.find(i.second.path);
if (r != redirectedOutputs.end()) {
- auto redirected = worker.store.toRealPath(worker.store.printStorePath(r->second));
+ auto redirected = worker.store.Store::toRealPath(r->second);
if (buildMode == bmRepair
&& redirectedBadOutputs.count(i.second.path)
&& pathExists(redirected))
@@ -3671,7 +3672,7 @@ void DerivationGoal::registerOutputs()
BuildError("hash mismatch in fixed-output derivation '%s':\n wanted: %s\n got: %s",
worker.store.printStorePath(dest), h.to_string(SRI), h2.to_string(SRI)));
- Path actualDest = worker.store.toRealPath(worker.store.printStorePath(dest));
+ Path actualDest = worker.store.Store::toRealPath(dest);
if (worker.store.isValidPath(dest))
std::rethrow_exception(delayedException);
diff --git a/src/libstore/builtins.hh b/src/libstore/builtins.hh
index 87d6ce665..66597e456 100644
--- a/src/libstore/builtins.hh
+++ b/src/libstore/builtins.hh
@@ -6,7 +6,6 @@ namespace nix {
// TODO: make pluggable.
void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData);
-void builtinBuildenv(const BasicDerivation & drv);
void builtinUnpackChannel(const BasicDerivation & drv);
}
diff --git a/src/libstore/builtins/buildenv.cc b/src/libstore/builtins/buildenv.cc
index 096593886..1b802d908 100644
--- a/src/libstore/builtins/buildenv.cc
+++ b/src/libstore/builtins/buildenv.cc
@@ -1,4 +1,4 @@
-#include "builtins.hh"
+#include "buildenv.hh"
#include <sys/stat.h>
#include <sys/types.h>
@@ -7,16 +7,14 @@
namespace nix {
-typedef std::map<Path,int> Priorities;
-
-// FIXME: change into local variables.
-
-static Priorities priorities;
-
-static unsigned long symlinks;
+struct State
+{
+ std::map<Path, int> priorities;
+ unsigned long symlinks = 0;
+};
/* For each activated package, create symlinks */
-static void createLinks(const Path & srcDir, const Path & dstDir, int priority)
+static void createLinks(State & state, const Path & srcDir, const Path & dstDir, int priority)
{
DirEntries srcFiles;
@@ -67,7 +65,7 @@ static void createLinks(const Path & srcDir, const Path & dstDir, int priority)
auto res = lstat(dstFile.c_str(), &dstSt);
if (res == 0) {
if (S_ISDIR(dstSt.st_mode)) {
- createLinks(srcFile, dstFile, priority);
+ createLinks(state, srcFile, dstFile, priority);
continue;
} else if (S_ISLNK(dstSt.st_mode)) {
auto target = canonPath(dstFile, true);
@@ -77,8 +75,8 @@ static void createLinks(const Path & srcDir, const Path & dstDir, int priority)
throw SysError(format("unlinking '%1%'") % dstFile);
if (mkdir(dstFile.c_str(), 0755) == -1)
throw SysError(format("creating directory '%1%'"));
- createLinks(target, dstFile, priorities[dstFile]);
- createLinks(srcFile, dstFile, priority);
+ createLinks(state, target, dstFile, state.priorities[dstFile]);
+ createLinks(state, srcFile, dstFile, priority);
continue;
}
} else if (errno != ENOENT)
@@ -90,7 +88,7 @@ static void createLinks(const Path & srcDir, const Path & dstDir, int priority)
auto res = lstat(dstFile.c_str(), &dstSt);
if (res == 0) {
if (S_ISLNK(dstSt.st_mode)) {
- auto prevPriority = priorities[dstFile];
+ auto prevPriority = state.priorities[dstFile];
if (prevPriority == priority)
throw Error(
"packages '%1%' and '%2%' have the same priority %3%; "
@@ -109,41 +107,57 @@ static void createLinks(const Path & srcDir, const Path & dstDir, int priority)
}
createSymlink(srcFile, dstFile);
- priorities[dstFile] = priority;
- symlinks++;
+ state.priorities[dstFile] = priority;
+ state.symlinks++;
}
}
-typedef std::set<Path> FileProp;
+void buildProfile(const Path & out, Packages && pkgs)
+{
+ State state;
-static FileProp done;
-static FileProp postponed = FileProp{};
+ std::set<Path> done, postponed;
-static Path out;
+ auto addPkg = [&](const Path & pkgDir, int priority) {
+ if (!done.insert(pkgDir).second) return;
+ createLinks(state, pkgDir, out, priority);
-static void addPkg(const Path & pkgDir, int priority)
-{
- if (!done.insert(pkgDir).second) return;
- createLinks(pkgDir, out, priority);
+ try {
+ for (const auto & p : tokenizeString<std::vector<string>>(
+ readFile(pkgDir + "/nix-support/propagated-user-env-packages"), " \n"))
+ if (!done.count(p))
+ postponed.insert(p);
+ } catch (SysError & e) {
+ if (e.errNo != ENOENT && e.errNo != ENOTDIR) throw;
+ }
+ };
- try {
- for (const auto & p : tokenizeString<std::vector<string>>(
- readFile(pkgDir + "/nix-support/propagated-user-env-packages"), " \n"))
- if (!done.count(p))
- postponed.insert(p);
- } catch (SysError & e) {
- if (e.errNo != ENOENT && e.errNo != ENOTDIR) throw;
- }
-}
+ /* Symlink to the packages that have been installed explicitly by the
+ * user. Process in priority order to reduce unnecessary
+ * symlink/unlink steps.
+ */
+ std::sort(pkgs.begin(), pkgs.end(), [](const Package & a, const Package & b) {
+ return a.priority < b.priority || (a.priority == b.priority && a.path < b.path);
+ });
+ for (const auto & pkg : pkgs)
+ if (pkg.active)
+ addPkg(pkg.path, pkg.priority);
-struct Package {
- Path path;
- bool active;
- int priority;
- Package(Path path, bool active, int priority) : path{path}, active{active}, priority{priority} {}
-};
+ /* Symlink to the packages that have been "propagated" by packages
+ * installed by the user (i.e., package X declares that it wants Y
+ * installed as well). We do these later because they have a lower
+ * priority in case of collisions.
+ */
+ auto priorityCounter = 1000;
+ while (!postponed.empty()) {
+ std::set<Path> pkgDirs;
+ postponed.swap(pkgDirs);
+ for (const auto & pkgDir : pkgDirs)
+ addPkg(pkgDir, priorityCounter++);
+ }
-typedef std::vector<Package> Packages;
+ debug("created %d symlinks in user environment", state.symlinks);
+}
void builtinBuildenv(const BasicDerivation & drv)
{
@@ -153,7 +167,7 @@ void builtinBuildenv(const BasicDerivation & drv)
return i->second;
};
- out = getAttr("out");
+ Path out = getAttr("out");
createDirs(out);
/* Convert the stuff we get from the environment back into a
@@ -171,31 +185,7 @@ void builtinBuildenv(const BasicDerivation & drv)
}
}
- /* Symlink to the packages that have been installed explicitly by the
- * user. Process in priority order to reduce unnecessary
- * symlink/unlink steps.
- */
- std::sort(pkgs.begin(), pkgs.end(), [](const Package & a, const Package & b) {
- return a.priority < b.priority || (a.priority == b.priority && a.path < b.path);
- });
- for (const auto & pkg : pkgs)
- if (pkg.active)
- addPkg(pkg.path, pkg.priority);
-
- /* Symlink to the packages that have been "propagated" by packages
- * installed by the user (i.e., package X declares that it wants Y
- * installed as well). We do these later because they have a lower
- * priority in case of collisions.
- */
- auto priorityCounter = 1000;
- while (!postponed.empty()) {
- auto pkgDirs = postponed;
- postponed = FileProp{};
- for (const auto & pkgDir : pkgDirs)
- addPkg(pkgDir, priorityCounter++);
- }
-
- printError("created %d symlinks in user environment", symlinks);
+ buildProfile(out, std::move(pkgs));
createSymlink(getAttr("manifest"), out + "/manifest.nix");
}
diff --git a/src/libstore/builtins/buildenv.hh b/src/libstore/builtins/buildenv.hh
new file mode 100644
index 000000000..73c0f5f7f
--- /dev/null
+++ b/src/libstore/builtins/buildenv.hh
@@ -0,0 +1,21 @@
+#pragma once
+
+#include "derivations.hh"
+#include "store-api.hh"
+
+namespace nix {
+
+struct Package {
+ Path path;
+ bool active;
+ int priority;
+ Package(const Path & path, bool active, int priority) : path{path}, active{active}, priority{priority} {}
+};
+
+typedef std::vector<Package> Packages;
+
+void buildProfile(const Path & out, Packages && pkgs);
+
+void builtinBuildenv(const BasicDerivation & drv);
+
+}
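The header above factors buildProfile() out of the buildenv builtin so that other code can construct a user environment directly. A minimal usage sketch (not part of this commit); the output directory and store paths are hypothetical, and the usual libutil headers are assumed:

    #include "builtins/buildenv.hh"
    #include "util.hh"

    using namespace nix;

    void makeExampleProfile()
    {
        // Hypothetical output directory; builtinBuildenv uses the derivation's $out here.
        Path out = "/tmp/example-profile";
        createDirs(out);

        // Hypothetical store paths. Active packages are linked at their priority;
        // a collision between equal priorities throws an Error.
        Packages pkgs;
        pkgs.emplace_back("/nix/store/<hash>-hello-2.10", true, 5);
        pkgs.emplace_back("/nix/store/<hash>-cowsay-3.03", true, 10);

        // Symlinks the package contents into 'out', then processes
        // propagated-user-env-packages at lower priority.
        buildProfile(out, std::move(pkgs));
    }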
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index d9da8769c..a554cb66d 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -354,7 +354,7 @@ Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutput
if (h == drvHashes.end()) {
assert(store.isValidPath(i.first));
h = drvHashes.insert_or_assign(i.first.clone(), hashDerivationModulo(store,
- readDerivation(store, store.toRealPath(store.printStorePath(i.first))), false)).first;
+ readDerivation(store, store.toRealPath(i.first)), false)).first;
}
inputs2.insert_or_assign(h->second.to_string(Base16, false), i.second);
}
diff --git a/src/libstore/download.cc b/src/libstore/download.cc
index 149c84765..8e0d7d42a 100644
--- a/src/libstore/download.cc
+++ b/src/libstore/download.cc
@@ -819,6 +819,7 @@ CachedDownloadResult Downloader::downloadCached(
CachedDownloadResult result;
result.storePath = store->printStorePath(*expectedStorePath);
result.path = store->toRealPath(result.storePath);
+ assert(!request.getLastModified); // FIXME
return result;
}
}
@@ -900,35 +901,43 @@ CachedDownloadResult Downloader::downloadCached(
std::optional<StorePath> unpackedStorePath;
if (pathExists(unpackedLink)) {
unpackedStorePath = store->parseStorePath(readLink(unpackedLink));
- // FIXME
store->addTempRoot(*unpackedStorePath);
if (!store->isValidPath(*unpackedStorePath))
unpackedStorePath.reset();
+ else
+ result.lastModified = lstat(unpackedLink).st_mtime;
}
if (!unpackedStorePath) {
printInfo("unpacking '%s'...", url);
Path tmpDir = createTempDir();
AutoDelete autoDelete(tmpDir, true);
- unpackTarfile(store->toRealPath(store->printStorePath(*storePath)), tmpDir);
+ unpackTarfile(store->toRealPath(*storePath), tmpDir);
auto members = readDirectory(tmpDir);
if (members.size() != 1)
throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
auto topDir = tmpDir + "/" + members.begin()->name;
+ result.lastModified = lstat(topDir).st_mtime;
unpackedStorePath = store->addToStore(name, topDir, true, htSHA256, defaultPathFilter, NoRepair);
}
- replaceSymlink(store->printStorePath(*unpackedStorePath), unpackedLink);
+ // Store the last-modified date of the tarball in the symlink
+ // mtime. This saves us from having to store it somewhere
+ // else.
+ replaceSymlink(store->printStorePath(*unpackedStorePath), unpackedLink, result.lastModified);
storePath = std::move(*unpackedStorePath);
}
if (expectedStorePath && *storePath != *expectedStorePath) {
unsigned int statusCode = 102;
Hash gotHash = request.unpack
- ? hashPath(request.expectedHash.type, store->toRealPath(store->printStorePath(*storePath))).first
- : hashFile(request.expectedHash.type, store->toRealPath(store->printStorePath(*storePath)));
+ ? hashPath(request.expectedHash.type, store->toRealPath(*storePath)).first
+ : hashFile(request.expectedHash.type, store->toRealPath(*storePath));
throw nix::Error(statusCode, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
url, request.expectedHash.to_string(), gotHash.to_string());
}
+ if (request.gcRoot)
+ store->addIndirectRoot(fileLink);
+
result.storePath = store->printStorePath(*storePath);
result.path = store->toRealPath(result.storePath);
return result;
diff --git a/src/libstore/download.hh b/src/libstore/download.hh
index 5a131c704..487036833 100644
--- a/src/libstore/download.hh
+++ b/src/libstore/download.hh
@@ -72,6 +72,8 @@ struct CachedDownloadRequest
std::string name;
Hash expectedHash;
unsigned int ttl;
+ bool gcRoot = false;
+ bool getLastModified = false;
CachedDownloadRequest(const std::string & uri);
CachedDownloadRequest() = delete;
@@ -85,6 +87,7 @@ struct CachedDownloadResult
Path path;
std::optional<std::string> etag;
std::string effectiveUri;
+ std::optional<time_t> lastModified;
};
class Store;
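The two new request flags and the lastModified result field are consumed by the GitHub fetcher added below. A sketch of a caller (not part of this commit); the URL and the function name are hypothetical:

    #include "download.hh"
    #include "store-api.hh"

    using namespace nix;

    std::optional<time_t> fetchExampleTarball(ref<Store> store)
    {
        // Hypothetical URL.
        CachedDownloadRequest request("https://example.org/source.tar.gz");
        request.unpack = true;
        request.name = "source";
        request.gcRoot = true;          // keep an indirect GC root on the download link
        request.getLastModified = true; // report the tarball's top-level mtime

        auto result = getDownloader()->downloadCached(store, request);

        // For unpacked downloads, the last-modified time is recovered from
        // the mtime stored on the 'unpacked' symlink.
        return result.lastModified;
    }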
diff --git a/src/libstore/fetchers/fetchers.cc b/src/libstore/fetchers/fetchers.cc
new file mode 100644
index 000000000..90bdc0fc5
--- /dev/null
+++ b/src/libstore/fetchers/fetchers.cc
@@ -0,0 +1,132 @@
+#include "fetchers.hh"
+#include "parse.hh"
+#include "store-api.hh"
+
+#include <nlohmann/json.hpp>
+
+namespace nix::fetchers {
+
+std::unique_ptr<std::vector<std::unique_ptr<InputScheme>>> inputSchemes = nullptr;
+
+void registerInputScheme(std::unique_ptr<InputScheme> && inputScheme)
+{
+ if (!inputSchemes) inputSchemes = std::make_unique<std::vector<std::unique_ptr<InputScheme>>>();
+ inputSchemes->push_back(std::move(inputScheme));
+}
+
+std::unique_ptr<Input> inputFromURL(const ParsedURL & url)
+{
+ for (auto & inputScheme : *inputSchemes) {
+ auto res = inputScheme->inputFromURL(url);
+ if (res) return res;
+ }
+ throw Error("input '%s' is unsupported", url.url);
+}
+
+std::unique_ptr<Input> inputFromURL(const std::string & url)
+{
+ return inputFromURL(parseURL(url));
+}
+
+std::unique_ptr<Input> inputFromAttrs(const Input::Attrs & attrs)
+{
+ for (auto & inputScheme : *inputSchemes) {
+ auto res = inputScheme->inputFromAttrs(attrs);
+ if (res) return res;
+ }
+ throw Error("input '%s' is unsupported", attrsToJson(attrs));
+}
+
+Input::Attrs jsonToAttrs(const nlohmann::json & json)
+{
+ fetchers::Input::Attrs attrs;
+
+ for (auto & i : json.items()) {
+ if (i.value().is_number())
+ attrs.emplace(i.key(), i.value().get<int64_t>());
+ else if (i.value().is_string())
+ attrs.emplace(i.key(), i.value().get<std::string>());
+ else
+ throw Error("unsupported input attribute type in lock file");
+ }
+
+ return attrs;
+}
+
+nlohmann::json attrsToJson(const fetchers::Input::Attrs & attrs)
+{
+ nlohmann::json json;
+ for (auto & attr : attrs) {
+ if (auto v = std::get_if<int64_t>(&attr.second)) {
+ json[attr.first] = *v;
+ } else if (auto v = std::get_if<std::string>(&attr.second)) {
+ json[attr.first] = *v;
+ } else abort();
+ }
+ return json;
+}
+
+Input::Attrs Input::toAttrs() const
+{
+ auto attrs = toAttrsInternal();
+ if (narHash)
+ attrs.emplace("narHash", narHash->to_string(SRI));
+ attrs.emplace("type", type());
+ return attrs;
+}
+
+std::optional<std::string> maybeGetStrAttr(const Input::Attrs & attrs, const std::string & name)
+{
+ auto i = attrs.find(name);
+ if (i == attrs.end()) return {};
+ if (auto v = std::get_if<std::string>(&i->second))
+ return *v;
+ throw Error("input attribute '%s' is not a string", name);
+}
+
+std::string getStrAttr(const Input::Attrs & attrs, const std::string & name)
+{
+ auto s = maybeGetStrAttr(attrs, name);
+ if (!s)
+ throw Error("input attribute '%s' is missing", name);
+ return *s;
+}
+
+std::pair<Tree, std::shared_ptr<const Input>> Input::fetchTree(ref<Store> store) const
+{
+ auto [tree, input] = fetchTreeInternal(store);
+
+ if (tree.actualPath == "")
+ tree.actualPath = store->toRealPath(tree.storePath);
+
+ if (!tree.info.narHash)
+ tree.info.narHash = store->queryPathInfo(tree.storePath)->narHash;
+
+ if (input->narHash)
+ assert(input->narHash == tree.info.narHash);
+
+ if (narHash && narHash != input->narHash)
+ throw Error("NAR hash mismatch in input '%s', expected '%s', got '%s'",
+ to_string(), narHash->to_string(SRI), input->narHash->to_string(SRI));
+
+ return {std::move(tree), input};
+}
+
+std::shared_ptr<const Input> Input::applyOverrides(
+ std::optional<std::string> ref,
+ std::optional<Hash> rev) const
+{
+ if (ref)
+ throw Error("don't know how to apply '%s' to '%s'", *ref, to_string());
+ if (rev)
+ throw Error("don't know how to apply '%s' to '%s'", rev->to_string(Base16, false), to_string());
+ return shared_from_this();
+}
+
+StorePath TreeInfo::computeStorePath(Store & store) const
+{
+ assert(narHash);
+ return store.makeFixedOutputPath(true, narHash, "source");
+}
+
+}
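The Attrs/JSON helpers above are the basis for lock files: an input is serialized to a flat attribute set and later reconstructed from it. A small round-trip sketch (not part of this commit); the URL is hypothetical:

    #include "fetchers/fetchers.hh"

    #include <nlohmann/json.hpp>

    using namespace nix::fetchers;

    void lockFileRoundTrip()
    {
        // Hypothetical Git input attributes as they would appear in a lock
        // file; integer attributes are emitted as JSON numbers.
        Input::Attrs attrs;
        attrs.emplace("type", "git");
        attrs.emplace("url", "https://example.org/repo.git");
        attrs.emplace("ref", "master");

        auto json = attrsToJson(attrs);   // {"ref":"master","type":"git","url":"..."}
        auto attrs2 = jsonToAttrs(json);  // strings and int64s survive the round trip

        // Reconstruct an Input via the registered GitInputScheme.
        auto input = inputFromAttrs(attrs2);
    }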
diff --git a/src/libstore/fetchers/fetchers.hh b/src/libstore/fetchers/fetchers.hh
new file mode 100644
index 000000000..4202e8339
--- /dev/null
+++ b/src/libstore/fetchers/fetchers.hh
@@ -0,0 +1,109 @@
+#pragma once
+
+#include "types.hh"
+#include "hash.hh"
+#include "path.hh"
+#include "tree-info.hh"
+
+#include <memory>
+#include <variant>
+
+#include <nlohmann/json_fwd.hpp>
+
+namespace nix { class Store; }
+
+namespace nix::fetchers {
+
+struct Input;
+
+struct Tree
+{
+ Path actualPath;
+ StorePath storePath;
+ TreeInfo info;
+};
+
+struct Input : std::enable_shared_from_this<Input>
+{
+ std::optional<Hash> narHash; // FIXME: implement
+
+ virtual std::string type() const = 0;
+
+ virtual ~Input() { }
+
+ virtual bool operator ==(const Input & other) const { return false; }
+
+ /* Check whether this is a "direct" input, that is, not
+ one that goes through a registry. */
+ virtual bool isDirect() const { return true; }
+
+ /* Check whether this is an "immutable" input, that is,
+ one that contains a commit hash or content hash. */
+ virtual bool isImmutable() const { return (bool) narHash; }
+
+ virtual bool contains(const Input & other) const { return false; }
+
+ virtual std::optional<std::string> getRef() const { return {}; }
+
+ virtual std::optional<Hash> getRev() const { return {}; }
+
+ virtual std::string to_string() const = 0;
+
+ typedef std::variant<std::string, int64_t> Attr;
+ typedef std::map<std::string, Attr> Attrs;
+
+ Attrs toAttrs() const;
+
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTree(ref<Store> store) const;
+
+ virtual std::shared_ptr<const Input> applyOverrides(
+ std::optional<std::string> ref,
+ std::optional<Hash> rev) const;
+
+ virtual std::optional<Path> getSourcePath() const { return {}; }
+
+ virtual void markChangedFile(
+ std::string_view file,
+ std::optional<std::string> commitMsg) const
+ { assert(false); }
+
+ virtual void clone(const Path & destDir) const
+ {
+ throw Error("do not know how to clone input '%s'", to_string());
+ }
+
+private:
+
+ virtual std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(ref<Store> store) const = 0;
+
+ virtual Attrs toAttrsInternal() const = 0;
+};
+
+struct ParsedURL;
+
+struct InputScheme
+{
+ virtual ~InputScheme() { }
+
+ virtual std::unique_ptr<Input> inputFromURL(const ParsedURL & url) = 0;
+
+ virtual std::unique_ptr<Input> inputFromAttrs(const Input::Attrs & attrs) = 0;
+};
+
+std::unique_ptr<Input> inputFromURL(const ParsedURL & url);
+
+std::unique_ptr<Input> inputFromURL(const std::string & url);
+
+std::unique_ptr<Input> inputFromAttrs(const Input::Attrs & attrs);
+
+void registerInputScheme(std::unique_ptr<InputScheme> && fetcher);
+
+Input::Attrs jsonToAttrs(const nlohmann::json & json);
+
+nlohmann::json attrsToJson(const Input::Attrs & attrs);
+
+std::optional<std::string> maybeGetStrAttr(const Input::Attrs & attrs, const std::string & name);
+
+std::string getStrAttr(const Input::Attrs & attrs, const std::string & name);
+
+}
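Taken together, the intended call pattern is: parse a URL into an Input, fetch it, and keep the returned "locked" input, which now carries the resolved revision. A minimal sketch (not part of this commit); the flake URL is only an example:

    #include "fetchers/fetchers.hh"
    #include "store-api.hh"

    using namespace nix;

    std::string fetchExample(ref<Store> store)
    {
        // Any registered scheme works here (git, github, hg, indirect, tarball).
        auto input = fetchers::inputFromURL("github:NixOS/nixpkgs/master");

        // fetchTree() copies the source tree into the store and returns an
        // input locked to the revision that was actually fetched.
        auto [tree, lockedInput] = input->fetchTree(store);

        // lockedInput->toAttrs() / attrsToJson() is what a lock file would record.
        return store->printStorePath(tree.storePath);
    }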
diff --git a/src/libstore/fetchers/git.cc b/src/libstore/fetchers/git.cc
new file mode 100644
index 000000000..134d44ecd
--- /dev/null
+++ b/src/libstore/fetchers/git.cc
@@ -0,0 +1,443 @@
+#include "fetchers.hh"
+#include "parse.hh"
+#include "globals.hh"
+#include "tarfile.hh"
+#include "store-api.hh"
+#include "regex.hh"
+
+#include <sys/time.h>
+
+#include <nlohmann/json.hpp>
+
+using namespace std::string_literals;
+
+namespace nix::fetchers {
+
+static Path getCacheInfoPathFor(const std::string & name, const Hash & rev)
+{
+ Path cacheDir = getCacheDir() + "/nix/git-revs-v2";
+ std::string linkName =
+ name == "source"
+ ? rev.gitRev()
+ : hashString(htSHA512, name + std::string("\0"s) + rev.gitRev()).to_string(Base32, false);
+ return cacheDir + "/" + linkName + ".link";
+}
+
+static std::string readHead(const Path & path)
+{
+ return chomp(runProgram("git", true, { "-C", path, "rev-parse", "--abbrev-ref", "HEAD" }));
+}
+
+static void cacheGitInfo(
+ Store & store,
+ const std::string & name,
+ const Tree & tree,
+ const Hash & rev)
+{
+ nlohmann::json json;
+ json["storePath"] = store.printStorePath(tree.storePath);
+ json["name"] = name;
+ json["rev"] = rev.gitRev();
+ json["revCount"] = *tree.info.revCount;
+ json["lastModified"] = *tree.info.lastModified;
+
+ auto cacheInfoPath = getCacheInfoPathFor(name, rev);
+ createDirs(dirOf(cacheInfoPath));
+ writeFile(cacheInfoPath, json.dump());
+}
+
+static std::optional<std::pair<Hash, Tree>> lookupGitInfo(
+ ref<Store> store,
+ const std::string & name,
+ const Hash & rev)
+{
+ try {
+ auto json = nlohmann::json::parse(readFile(getCacheInfoPathFor(name, rev)));
+
+ assert(json["name"] == name && Hash((std::string) json["rev"], htSHA1) == rev);
+
+ auto storePath = store->parseStorePath((std::string) json["storePath"]);
+
+ if (store->isValidPath(storePath)) {
+ return {{rev, Tree{
+ .actualPath = store->toRealPath(storePath),
+ .storePath = std::move(storePath),
+ .info = TreeInfo {
+ .revCount = json["revCount"],
+ .lastModified = json["lastModified"],
+ }
+ }}};
+ }
+
+ } catch (SysError & e) {
+ if (e.errNo != ENOENT) throw;
+ }
+
+ return {};
+}
+
+struct GitInput : Input
+{
+ ParsedURL url;
+ std::optional<std::string> ref;
+ std::optional<Hash> rev;
+
+ GitInput(const ParsedURL & url) : url(url)
+ { }
+
+ std::string type() const override { return "git"; }
+
+ bool operator ==(const Input & other) const override
+ {
+ auto other2 = dynamic_cast<const GitInput *>(&other);
+ return
+ other2
+ && url == other2->url
+ && rev == other2->rev
+ && ref == other2->ref;
+ }
+
+ bool isImmutable() const override
+ {
+ return (bool) rev;
+ }
+
+ std::optional<std::string> getRef() const override { return ref; }
+
+ std::optional<Hash> getRev() const override { return rev; }
+
+ std::string to_string() const override
+ {
+ ParsedURL url2(url);
+ if (url2.scheme != "git") url2.scheme = "git+" + url2.scheme;
+ if (rev) url2.query.insert_or_assign("rev", rev->gitRev());
+ if (ref) url2.query.insert_or_assign("ref", *ref);
+ return url2.to_string();
+ }
+
+ Attrs toAttrsInternal() const override
+ {
+ Attrs attrs;
+ attrs.emplace("url", url.to_string());
+ if (ref)
+ attrs.emplace("ref", *ref);
+ if (rev)
+ attrs.emplace("rev", rev->gitRev());
+ return attrs;
+ }
+
+ void clone(const Path & destDir) const override
+ {
+ auto [isLocal, actualUrl] = getActualUrl();
+
+ Strings args = {"clone"};
+
+ args.push_back(actualUrl);
+
+ if (ref) {
+ args.push_back("--branch");
+ args.push_back(*ref);
+ }
+
+ if (rev) throw Error("cloning a specific revision is not implemented");
+
+ args.push_back(destDir);
+
+ runProgram("git", true, args);
+ }
+
+ std::shared_ptr<const Input> applyOverrides(
+ std::optional<std::string> ref,
+ std::optional<Hash> rev) const override
+ {
+ if (!ref && !rev) return shared_from_this();
+
+ auto res = std::make_shared<GitInput>(*this);
+
+ if (ref) res->ref = ref;
+ if (rev) res->rev = rev;
+
+ if (!res->ref && res->rev)
+ throw Error("Git input '%s' has a commit hash but no branch/tag name", res->to_string());
+
+ return res;
+ }
+
+ std::optional<Path> getSourcePath() const override
+ {
+ if (url.scheme == "file" && !ref && !rev)
+ return url.path;
+ return {};
+ }
+ void markChangedFile(std::string_view file, std::optional<std::string> commitMsg) const override
+ {
+ auto sourcePath = getSourcePath();
+ assert(sourcePath);
+
+ runProgram("git", true,
+ { "-C", *sourcePath, "add", "--force", "--intent-to-add", std::string(file) });
+
+ if (commitMsg)
+ runProgram("git", true,
+ { "-C", *sourcePath, "commit", std::string(file), "-m", *commitMsg });
+ }
+
+ std::pair<bool, std::string> getActualUrl() const
+ {
+ // Don't clone file:// URIs (but otherwise treat them the
+ // same as remote URIs, i.e. don't use the working tree or
+ // HEAD).
+ static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing
+ bool isLocal = url.scheme == "file" && !forceHttp;
+ return {isLocal, isLocal ? url.path : url.base};
+ }
+
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+ {
+ auto name = "source";
+
+ auto input = std::make_shared<GitInput>(*this);
+
+ assert(!rev || rev->type == htSHA1);
+
+ if (rev) {
+ if (auto tree = lookupGitInfo(store, name, *rev)) {
+ input->rev = tree->first;
+ return {std::move(tree->second), input};
+ }
+ }
+
+ auto [isLocal, actualUrl_] = getActualUrl();
+ auto actualUrl = actualUrl_; // work around clang bug
+
+ // If this is a local directory and no ref or revision is
+ // given, then allow the use of an unclean working tree.
+ if (!input->ref && !input->rev && isLocal) {
+ bool clean = false;
+
+ /* Check whether this repo has any commits. There are
+ probably better ways to do this. */
+ bool haveCommits = !readDirectory(actualUrl + "/.git/refs/heads").empty();
+
+ try {
+ if (haveCommits) {
+ runProgram("git", true, { "-C", actualUrl, "diff-index", "--quiet", "HEAD", "--" });
+ clean = true;
+ }
+ } catch (ExecError & e) {
+ if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
+ }
+
+ if (!clean) {
+
+ /* This is an unclean working tree. So copy all tracked files. */
+
+ if (!settings.allowDirty)
+ throw Error("Git tree '%s' is dirty", actualUrl);
+
+ if (settings.warnDirty)
+ warn("Git tree '%s' is dirty", actualUrl);
+
+ auto files = tokenizeString<std::set<std::string>>(
+ runProgram("git", true, { "-C", actualUrl, "ls-files", "-z" }), "\0"s);
+
+ PathFilter filter = [&](const Path & p) -> bool {
+ assert(hasPrefix(p, actualUrl));
+ std::string file(p, actualUrl.size() + 1);
+
+ auto st = lstat(p);
+
+ if (S_ISDIR(st.st_mode)) {
+ auto prefix = file + "/";
+ auto i = files.lower_bound(prefix);
+ return i != files.end() && hasPrefix(*i, prefix);
+ }
+
+ return files.count(file);
+ };
+
+ auto storePath = store->addToStore("source", actualUrl, true, htSHA256, filter);
+
+ auto tree = Tree {
+ .actualPath = store->printStorePath(storePath),
+ .storePath = std::move(storePath),
+ .info = TreeInfo {
+ .revCount = haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "rev-list", "--count", "HEAD" })) : 0,
+ // FIXME: maybe we should use the timestamp of the last
+ // modified dirty file?
+ .lastModified = haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "HEAD" })) : 0,
+ }
+ };
+
+ return {std::move(tree), input};
+ }
+ }
+
+ if (!input->ref) input->ref = isLocal ? readHead(actualUrl) : "master";
+
+ Path repoDir;
+
+ if (isLocal) {
+
+ if (!input->rev)
+ input->rev = Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input->ref })), htSHA1);
+
+ repoDir = actualUrl;
+
+ } else {
+
+ Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, actualUrl).to_string(Base32, false);
+ repoDir = cacheDir;
+
+ if (!pathExists(cacheDir)) {
+ createDirs(dirOf(cacheDir));
+ runProgram("git", true, { "init", "--bare", repoDir });
+ }
+
+ Path localRefFile =
+ input->ref->compare(0, 5, "refs/") == 0
+ ? cacheDir + "/" + *input->ref
+ : cacheDir + "/refs/heads/" + *input->ref;
+
+ bool doFetch;
+ time_t now = time(0);
+
+ /* If a rev was specified, we need to fetch if it's not in the
+ repo. */
+ if (input->rev) {
+ try {
+ runProgram("git", true, { "-C", repoDir, "cat-file", "-e", input->rev->gitRev() });
+ doFetch = false;
+ } catch (ExecError & e) {
+ if (WIFEXITED(e.status)) {
+ doFetch = true;
+ } else {
+ throw;
+ }
+ }
+ } else {
+ /* If the local ref is older than ‘tarball-ttl’ seconds, do a
+ git fetch to update the local ref to the remote ref. */
+ struct stat st;
+ doFetch = stat(localRefFile.c_str(), &st) != 0 ||
+ (uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now;
+ }
+
+ if (doFetch) {
+ Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", actualUrl));
+
+ // FIXME: git stderr messes up our progress indicator, so
+ // we're using --quiet for now. Should process its stderr.
+ try {
+ runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", *input->ref, *input->ref) });
+ } catch (Error & e) {
+ if (!pathExists(localRefFile)) throw;
+ warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl);
+ }
+
+ struct timeval times[2];
+ times[0].tv_sec = now;
+ times[0].tv_usec = 0;
+ times[1].tv_sec = now;
+ times[1].tv_usec = 0;
+
+ utimes(localRefFile.c_str(), times);
+ }
+
+ if (!input->rev)
+ input->rev = Hash(chomp(readFile(localRefFile)), htSHA1);
+ }
+
+ if (auto tree = lookupGitInfo(store, name, *input->rev)) {
+ assert(*input->rev == tree->first);
+ return {std::move(tree->second), input};
+ }
+
+ // FIXME: check whether rev is an ancestor of ref.
+
+ printTalkative("using revision %s of repo '%s'", input->rev->gitRev(), actualUrl);
+
+ // FIXME: should pipe this, or find some better way to extract a
+ // revision.
+ auto source = sinkToSource([&](Sink & sink) {
+ RunOptions gitOptions("git", { "-C", repoDir, "archive", input->rev->gitRev() });
+ gitOptions.standardOut = &sink;
+ runProgram2(gitOptions);
+ });
+
+ Path tmpDir = createTempDir();
+ AutoDelete delTmpDir(tmpDir, true);
+
+ unpackTarfile(*source, tmpDir);
+
+ auto storePath = store->addToStore(name, tmpDir);
+ auto revCount = std::stoull(runProgram("git", true, { "-C", repoDir, "rev-list", "--count", input->rev->gitRev() }));
+ auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input->rev->gitRev() }));
+
+ auto tree = Tree {
+ .actualPath = store->toRealPath(storePath),
+ .storePath = std::move(storePath),
+ .info = TreeInfo {
+ .revCount = revCount,
+ .lastModified = lastModified
+ }
+ };
+
+ cacheGitInfo(*store, name, tree, *input->rev);
+
+ return {std::move(tree), input};
+ }
+};
+
+struct GitInputScheme : InputScheme
+{
+ std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
+ {
+ if (url.scheme != "git" &&
+ url.scheme != "git+http" &&
+ url.scheme != "git+https" &&
+ url.scheme != "git+ssh" &&
+ url.scheme != "git+file") return nullptr;
+
+ auto url2(url);
+ if (hasPrefix(url2.scheme, "git+")) url2.scheme = std::string(url2.scheme, 4);
+ url2.query.clear();
+
+ Input::Attrs attrs;
+ attrs.emplace("type", "git");
+
+ for (auto &[name, value] : url.query) {
+ if (name == "rev" || name == "ref")
+ attrs.emplace(name, value);
+ else
+ url2.query.emplace(name, value);
+ }
+
+ attrs.emplace("url", url2.to_string());
+
+ return inputFromAttrs(attrs);
+ }
+
+ std::unique_ptr<Input> inputFromAttrs(const Input::Attrs & attrs) override
+ {
+ if (maybeGetStrAttr(attrs, "type") != "git") return {};
+
+ for (auto & [name, value] : attrs)
+ if (name != "type" && name != "url" && name != "ref" && name != "rev")
+ throw Error("unsupported Git input attribute '%s'", name);
+
+ auto input = std::make_unique<GitInput>(parseURL(getStrAttr(attrs, "url")));
+ if (auto ref = maybeGetStrAttr(attrs, "ref")) {
+ if (!std::regex_match(*ref, refRegex))
+ throw BadURL("invalid Git branch/tag name '%s'", *ref);
+ input->ref = *ref;
+ }
+ if (auto rev = maybeGetStrAttr(attrs, "rev"))
+ input->rev = Hash(*rev, htSHA1);
+ return input;
+ }
+};
+
+static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitInputScheme>()); });
+
+}
diff --git a/src/libstore/fetchers/github.cc b/src/libstore/fetchers/github.cc
new file mode 100644
index 000000000..0a000e83f
--- /dev/null
+++ b/src/libstore/fetchers/github.cc
@@ -0,0 +1,212 @@
+#include "fetchers.hh"
+#include "download.hh"
+#include "globals.hh"
+#include "parse.hh"
+#include "regex.hh"
+#include "store-api.hh"
+
+#include <nlohmann/json.hpp>
+
+namespace nix::fetchers {
+
+std::regex ownerRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
+std::regex repoRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
+
+struct GitHubInput : Input
+{
+ std::string owner;
+ std::string repo;
+ std::optional<std::string> ref;
+ std::optional<Hash> rev;
+
+ std::string type() const override { return "github"; }
+
+ bool operator ==(const Input & other) const override
+ {
+ auto other2 = dynamic_cast<const GitHubInput *>(&other);
+ return
+ other2
+ && owner == other2->owner
+ && repo == other2->repo
+ && rev == other2->rev
+ && ref == other2->ref;
+ }
+
+ bool isImmutable() const override
+ {
+ return (bool) rev;
+ }
+
+ std::optional<std::string> getRef() const override { return ref; }
+
+ std::optional<Hash> getRev() const override { return rev; }
+
+ std::string to_string() const override
+ {
+ auto s = fmt("github:%s/%s", owner, repo);
+ assert(!(ref && rev));
+ if (ref) s += "/" + *ref;
+ if (rev) s += "/" + rev->to_string(Base16, false);
+ return s;
+ }
+
+ Attrs toAttrsInternal() const override
+ {
+ Attrs attrs;
+ attrs.emplace("owner", owner);
+ attrs.emplace("repo", repo);
+ if (ref)
+ attrs.emplace("ref", *ref);
+ if (rev)
+ attrs.emplace("rev", rev->gitRev());
+ return attrs;
+ }
+
+ void clone(const Path & destDir) const override
+ {
+ std::shared_ptr<const Input> input = inputFromURL(fmt("git+ssh://git@github.com/%s/%s.git", owner, repo));
+ input = input->applyOverrides(ref.value_or("master"), rev);
+ input->clone(destDir);
+ }
+
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+ {
+ auto rev = this->rev;
+
+ #if 0
+ if (rev) {
+ if (auto gitInfo = lookupGitInfo(store, "source", *rev))
+ return *gitInfo;
+ }
+ #endif
+
+ if (!rev) {
+ auto url = fmt("https://api.github.com/repos/%s/%s/commits/%s",
+ owner, repo, ref ? *ref : "master");
+ CachedDownloadRequest request(url);
+ request.ttl = rev ? 1000000000 : settings.tarballTtl;
+ auto result = getDownloader()->downloadCached(store, request);
+ auto json = nlohmann::json::parse(readFile(result.path));
+ rev = Hash(json["sha"], htSHA1);
+ debug("HEAD revision for '%s' is %s", url, rev->gitRev());
+ }
+
+ // FIXME: use regular /archive URLs instead? api.github.com
+ // might have stricter rate limits.
+
+ auto url = fmt("https://api.github.com/repos/%s/%s/tarball/%s",
+ owner, repo, rev->to_string(Base16, false));
+
+ std::string accessToken = settings.githubAccessToken.get();
+ if (accessToken != "")
+ url += "?access_token=" + accessToken;
+
+ CachedDownloadRequest request(url);
+ request.unpack = true;
+ request.name = "source";
+ request.ttl = 1000000000;
+ request.getLastModified = true;
+ auto dresult = getDownloader()->downloadCached(store, request);
+
+ assert(dresult.lastModified);
+
+ Tree result{
+ .actualPath = dresult.path,
+ .storePath = store->parseStorePath(dresult.storePath),
+ .info = TreeInfo {
+ .lastModified = *dresult.lastModified,
+ },
+ };
+
+ #if 0
+ // FIXME: this can overwrite a cache file that contains a revCount.
+ cacheGitInfo("source", gitInfo);
+ #endif
+
+ auto input = std::make_shared<GitHubInput>(*this);
+ input->ref = {};
+ input->rev = *rev;
+
+ return {std::move(result), input};
+ }
+
+ std::shared_ptr<const Input> applyOverrides(
+ std::optional<std::string> ref,
+ std::optional<Hash> rev) const override
+ {
+ if (!ref && !rev) return shared_from_this();
+
+ auto res = std::make_shared<GitHubInput>(*this);
+
+ if (ref) res->ref = ref;
+ if (rev) res->rev = rev;
+
+ return res;
+ }
+};
+
+struct GitHubInputScheme : InputScheme
+{
+ std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
+ {
+ if (url.scheme != "github") return nullptr;
+
+ auto path = tokenizeString<std::vector<std::string>>(url.path, "/");
+ auto input = std::make_unique<GitHubInput>();
+
+ if (path.size() == 2) {
+ } else if (path.size() == 3) {
+ if (std::regex_match(path[2], revRegex))
+ input->rev = Hash(path[2], htSHA1);
+ else if (std::regex_match(path[2], refRegex))
+ input->ref = path[2];
+ else
+ throw BadURL("in GitHub URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[2]);
+ } else
+ throw BadURL("GitHub URL '%s' is invalid", url.url);
+
+ for (auto &[name, value] : url.query) {
+ if (name == "rev") {
+ if (input->rev)
+ throw BadURL("GitHub URL '%s' contains multiple commit hashes", url.url);
+ input->rev = Hash(value, htSHA1);
+ }
+ else if (name == "ref") {
+ if (!std::regex_match(value, refRegex))
+ throw BadURL("GitHub URL '%s' contains an invalid branch/tag name", url.url);
+ if (input->ref)
+ throw BadURL("GitHub URL '%s' contains multiple branch/tag names", url.url);
+ input->ref = value;
+ }
+ }
+
+ if (input->ref && input->rev)
+ throw BadURL("GitHub URL '%s' contains both a commit hash and a branch/tag name", url.url);
+
+ input->owner = path[0];
+ input->repo = path[1];
+
+ return input;
+ }
+
+ std::unique_ptr<Input> inputFromAttrs(const Input::Attrs & attrs) override
+ {
+ if (maybeGetStrAttr(attrs, "type") != "github") return {};
+
+ for (auto & [name, value] : attrs)
+ if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev")
+ throw Error("unsupported GitHub input attribute '%s'", name);
+
+ auto input = std::make_unique<GitHubInput>();
+ input->owner = getStrAttr(attrs, "owner");
+ input->repo = getStrAttr(attrs, "repo");
+ input->ref = maybeGetStrAttr(attrs, "ref");
+ if (auto rev = maybeGetStrAttr(attrs, "rev"))
+ input->rev = Hash(*rev, htSHA1);
+ return input;
+ }
+};
+
+static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitHubInputScheme>()); });
+
+}
diff --git a/src/libstore/fetchers/indirect.cc b/src/libstore/fetchers/indirect.cc
new file mode 100644
index 000000000..016f5fb39
--- /dev/null
+++ b/src/libstore/fetchers/indirect.cc
@@ -0,0 +1,142 @@
+#include "fetchers.hh"
+#include "parse.hh"
+#include "regex.hh"
+
+namespace nix::fetchers {
+
+std::regex flakeRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
+
+struct IndirectInput : Input
+{
+ std::string id;
+ std::optional<Hash> rev;
+ std::optional<std::string> ref;
+
+ std::string type() const override { return "indirect"; }
+
+ bool operator ==(const Input & other) const override
+ {
+ auto other2 = dynamic_cast<const IndirectInput *>(&other);
+ return
+ other2
+ && id == other2->id
+ && rev == other2->rev
+ && ref == other2->ref;
+ }
+
+ bool isDirect() const override
+ {
+ return false;
+ }
+
+ std::optional<std::string> getRef() const override { return ref; }
+
+ std::optional<Hash> getRev() const override { return rev; }
+
+ bool contains(const Input & other) const override
+ {
+ auto other2 = dynamic_cast<const IndirectInput *>(&other);
+ return
+ other2
+ && id == other2->id
+ && (!ref || ref == other2->ref)
+ && (!rev || rev == other2->rev);
+ }
+
+ std::string to_string() const override
+ {
+ ParsedURL url;
+ url.scheme = "flake";
+ url.path = id;
+ if (ref) { url.path += '/'; url.path += *ref; };
+ if (rev) { url.path += '/'; url.path += rev->gitRev(); };
+ return url.to_string();
+ }
+
+ Attrs toAttrsInternal() const override
+ {
+ Attrs attrs;
+ attrs.emplace("id", id);
+ if (ref)
+ attrs.emplace("ref", *ref);
+ if (rev)
+ attrs.emplace("rev", rev->gitRev());
+ return attrs;
+ }
+
+ std::shared_ptr<const Input> applyOverrides(
+ std::optional<std::string> ref,
+ std::optional<Hash> rev) const override
+ {
+ if (!ref && !rev) return shared_from_this();
+
+ auto res = std::make_shared<IndirectInput>(*this);
+
+ if (ref) res->ref = ref;
+ if (rev) res->rev = rev;
+
+ return res;
+ }
+
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+ {
+ throw Error("indirect input '%s' cannot be fetched directly", to_string());
+ }
+};
+
+struct IndirectInputScheme : InputScheme
+{
+ std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
+ {
+ if (url.scheme != "flake") return nullptr;
+
+ auto path = tokenizeString<std::vector<std::string>>(url.path, "/");
+ auto input = std::make_unique<IndirectInput>();
+
+ if (path.size() == 1) {
+ } else if (path.size() == 2) {
+ if (std::regex_match(path[1], revRegex))
+ input->rev = Hash(path[1], htSHA1);
+ else if (std::regex_match(path[1], refRegex))
+ input->ref = path[1];
+ else
+ throw BadURL("in flake URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[1]);
+ } else if (path.size() == 3) {
+ if (!std::regex_match(path[1], refRegex))
+ throw BadURL("in flake URL '%s', '%s' is not a branch/tag name", url.url, path[1]);
+ input->ref = path[1];
+ if (!std::regex_match(path[2], revRegex))
+ throw BadURL("in flake URL '%s', '%s' is not a commit hash", url.url, path[2]);
+ input->rev = Hash(path[2], htSHA1);
+ } else
+ throw BadURL("GitHub URL '%s' is invalid", url.url);
+
+ // FIXME: forbid query params?
+
+ input->id = path[0];
+ if (!std::regex_match(input->id, flakeRegex))
+ throw BadURL("'%s' is not a valid flake ID", input->id);
+
+ return input;
+ }
+
+ std::unique_ptr<Input> inputFromAttrs(const Input::Attrs & attrs) override
+ {
+ if (maybeGetStrAttr(attrs, "type") != "indirect") return {};
+
+ for (auto & [name, value] : attrs)
+ if (name != "type" && name != "id" && name != "ref" && name != "rev")
+ throw Error("unsupported indirect input attribute '%s'", name);
+
+ auto input = std::make_unique<IndirectInput>();
+ input->id = getStrAttr(attrs, "id");
+ input->ref = maybeGetStrAttr(attrs, "ref");
+ if (auto rev = maybeGetStrAttr(attrs, "rev"))
+ input->rev = Hash(*rev, htSHA1);
+ return input;
+ }
+};
+
+static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<IndirectInputScheme>()); });
+
+}
diff --git a/src/libstore/fetchers/mercurial.cc b/src/libstore/fetchers/mercurial.cc
new file mode 100644
index 000000000..6ab0add1d
--- /dev/null
+++ b/src/libstore/fetchers/mercurial.cc
@@ -0,0 +1,331 @@
+#include "fetchers.hh"
+#include "parse.hh"
+#include "globals.hh"
+#include "tarfile.hh"
+#include "store-api.hh"
+#include "regex.hh"
+
+#include <sys/time.h>
+
+#include <nlohmann/json.hpp>
+
+using namespace std::string_literals;
+
+namespace nix::fetchers {
+
+struct MercurialInput : Input
+{
+ ParsedURL url;
+ std::optional<std::string> ref;
+ std::optional<Hash> rev;
+
+ MercurialInput(const ParsedURL & url) : url(url)
+ { }
+
+ std::string type() const override { return "hg"; }
+
+ bool operator ==(const Input & other) const override
+ {
+ auto other2 = dynamic_cast<const MercurialInput *>(&other);
+ return
+ other2
+ && url == other2->url
+ && rev == other2->rev
+ && ref == other2->ref;
+ }
+
+ bool isImmutable() const override
+ {
+ return (bool) rev;
+ }
+
+ std::optional<std::string> getRef() const override { return ref; }
+
+ std::optional<Hash> getRev() const override { return rev; }
+
+ std::string to_string() const override
+ {
+ ParsedURL url2(url);
+ url2.scheme = "hg+" + url2.scheme;
+ if (rev) url2.query.insert_or_assign("rev", rev->gitRev());
+ if (ref) url2.query.insert_or_assign("ref", *ref);
+ return url2.to_string();
+ }
+
+ Attrs toAttrsInternal() const override
+ {
+ Attrs attrs;
+ attrs.emplace("url", url.to_string());
+ if (ref)
+ attrs.emplace("ref", *ref);
+ if (rev)
+ attrs.emplace("rev", rev->gitRev());
+ return attrs;
+ }
+
+ std::shared_ptr<const Input> applyOverrides(
+ std::optional<std::string> ref,
+ std::optional<Hash> rev) const override
+ {
+ if (!ref && !rev) return shared_from_this();
+
+ auto res = std::make_shared<MercurialInput>(*this);
+
+ if (ref) res->ref = ref;
+ if (rev) res->rev = rev;
+
+ return res;
+ }
+
+ std::optional<Path> getSourcePath() const
+ {
+ if (url.scheme == "file" && !ref && !rev)
+ return url.path;
+ return {};
+ }
+
+ void markChangedFile(std::string_view file, std::optional<std::string> commitMsg) const override
+ {
+ auto sourcePath = getSourcePath();
+ assert(sourcePath);
+
+ // FIXME: shut up if file is already tracked.
+ runProgram("hg", true,
+ { "add", *sourcePath + "/" + std::string(file) });
+
+ if (commitMsg)
+ runProgram("hg", true,
+ { "commit", *sourcePath + "/" + std::string(file), "-m", *commitMsg });
+ }
+
+ std::pair<bool, std::string> getActualUrl() const
+ {
+ bool isLocal = url.scheme == "file";
+ return {isLocal, isLocal ? url.path : url.base};
+ }
+
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+ {
+ auto name = "source";
+
+ auto input = std::make_shared<MercurialInput>(*this);
+
+ auto [isLocal, actualUrl_] = getActualUrl();
+ auto actualUrl = actualUrl_; // work around clang bug
+
+ // FIXME: return lastModified.
+
+ // FIXME: don't clone local repositories.
+
+ if (!input->ref && !input->rev && isLocal && pathExists(actualUrl + "/.hg")) {
+
+ bool clean = runProgram("hg", true, { "status", "-R", actualUrl, "--modified", "--added", "--removed" }) == "";
+
+ if (!clean) {
+
+ /* This is an unclean working tree. So copy all tracked
+ files. */
+
+ if (!settings.allowDirty)
+ throw Error("Mercurial tree '%s' is unclean", actualUrl);
+
+ if (settings.warnDirty)
+ warn("Mercurial tree '%s' is unclean", actualUrl);
+
+ input->ref = chomp(runProgram("hg", true, { "branch", "-R", actualUrl }));
+
+ auto files = tokenizeString<std::set<std::string>>(
+ runProgram("hg", true, { "status", "-R", actualUrl, "--clean", "--modified", "--added", "--no-status", "--print0" }), "\0"s);
+
+ PathFilter filter = [&](const Path & p) -> bool {
+ assert(hasPrefix(p, actualUrl));
+ std::string file(p, actualUrl.size() + 1);
+
+ auto st = lstat(p);
+
+ if (S_ISDIR(st.st_mode)) {
+ auto prefix = file + "/";
+ auto i = files.lower_bound(prefix);
+ return i != files.end() && hasPrefix(*i, prefix);
+ }
+
+ return files.count(file);
+ };
+
+ auto storePath = store->addToStore("source", actualUrl, true, htSHA256, filter);
+
+ return {Tree {
+ .actualPath = store->printStorePath(storePath),
+ .storePath = std::move(storePath),
+ }, input};
+ }
+ }
+
+ if (!input->ref) input->ref = "default";
+
+ Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(Base32, false));
+
+ assert(input->rev || input->ref);
+ auto revOrRef = input->rev ? input->rev->gitRev() : *input->ref;
+
+ Path stampFile = fmt("%s/.hg/%s.stamp", cacheDir, hashString(htSHA512, revOrRef).to_string(Base32, false));
+
+ /* If we haven't pulled this repo less than ‘tarball-ttl’ seconds,
+ do so now. */
+ time_t now = time(0);
+ struct stat st;
+ if (stat(stampFile.c_str(), &st) != 0 ||
+ (uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now)
+ {
+ /* Except that if this is a commit hash that we already have,
+ we don't have to pull again. */
+ if (!(input->rev
+ && pathExists(cacheDir)
+ && runProgram(
+ RunOptions("hg", { "log", "-R", cacheDir, "-r", input->rev->gitRev(), "--template", "1" })
+ .killStderr(true)).second == "1"))
+ {
+ Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", actualUrl));
+
+ if (pathExists(cacheDir)) {
+ try {
+ runProgram("hg", true, { "pull", "-R", cacheDir, "--", actualUrl });
+ }
+ catch (ExecError & e) {
+ string transJournal = cacheDir + "/.hg/store/journal";
+ /* hg throws "abandoned transaction" error only if this file exists */
+ if (pathExists(transJournal)) {
+ runProgram("hg", true, { "recover", "-R", cacheDir });
+ runProgram("hg", true, { "pull", "-R", cacheDir, "--", actualUrl });
+ } else {
+ throw ExecError(e.status, fmt("'hg pull' %s", statusToString(e.status)));
+ }
+ }
+ } else {
+ createDirs(dirOf(cacheDir));
+ runProgram("hg", true, { "clone", "--noupdate", "--", actualUrl, cacheDir });
+ }
+ }
+
+ writeFile(stampFile, "");
+ }
+
+ auto tokens = tokenizeString<std::vector<std::string>>(
+ runProgram("hg", true, { "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
+ assert(tokens.size() == 3);
+
+ input->rev = Hash(tokens[0], htSHA1);
+ auto revCount = std::stoull(tokens[1]);
+ input->ref = tokens[2];
+
+ std::string storeLinkName = hashString(htSHA512, name + std::string("\0"s) + input->rev->gitRev()).to_string(Base32, false);
+ Path storeLink = fmt("%s/.hg/%s.link", cacheDir, storeLinkName);
+
+ try {
+ auto json = nlohmann::json::parse(readFile(storeLink));
+
+ assert(json["name"] == name && json["rev"] == input->rev->gitRev());
+
+ auto storePath = store->parseStorePath((std::string) json["storePath"]);
+
+ if (store->isValidPath(storePath)) {
+ printTalkative("using cached Mercurial store path '%s'", store->printStorePath(storePath));
+ return {
+ Tree {
+ .actualPath = store->printStorePath(storePath),
+ .storePath = std::move(storePath),
+ .info = TreeInfo {
+ .revCount = revCount,
+ },
+ },
+ input
+ };
+ }
+
+ } catch (SysError & e) {
+ if (e.errNo != ENOENT) throw;
+ }
+
+ Path tmpDir = createTempDir();
+ AutoDelete delTmpDir(tmpDir, true);
+
+ runProgram("hg", true, { "archive", "-R", cacheDir, "-r", input->rev->gitRev(), tmpDir });
+
+ deletePath(tmpDir + "/.hg_archival.txt");
+
+ auto storePath = store->addToStore(name, tmpDir);
+
+ nlohmann::json json;
+ json["storePath"] = store->printStorePath(storePath);
+ json["uri"] = actualUrl;
+ json["name"] = name;
+ json["branch"] = *input->ref;
+ json["rev"] = input->rev->gitRev();
+ json["revCount"] = revCount;
+
+ writeFile(storeLink, json.dump());
+
+ return {
+ Tree {
+ .actualPath = store->printStorePath(storePath),
+ .storePath = std::move(storePath),
+ .info = TreeInfo {
+ .revCount = revCount
+ }
+ },
+ input
+ };
+ }
+};
+
+struct MercurialInputScheme : InputScheme
+{
+ std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
+ {
+ if (url.scheme != "hg+http" &&
+ url.scheme != "hg+https" &&
+ url.scheme != "hg+ssh" &&
+ url.scheme != "hg+file") return nullptr;
+
+ auto url2(url);
+ url2.scheme = std::string(url2.scheme, 3);
+ url2.query.clear();
+
+ Input::Attrs attrs;
+ attrs.emplace("type", "hg");
+
+ for (auto &[name, value] : url.query) {
+ if (name == "rev" || name == "ref")
+ attrs.emplace(name, value);
+ else
+ url2.query.emplace(name, value);
+ }
+
+ attrs.emplace("url", url2.to_string());
+
+ return inputFromAttrs(attrs);
+ }
+
+ std::unique_ptr<Input> inputFromAttrs(const Input::Attrs & attrs) override
+ {
+ if (maybeGetStrAttr(attrs, "type") != "hg") return {};
+
+ for (auto & [name, value] : attrs)
+ if (name != "type" && name != "url" && name != "ref" && name != "rev")
+ throw Error("unsupported Mercurial input attribute '%s'", name);
+
+ auto input = std::make_unique<MercurialInput>(parseURL(getStrAttr(attrs, "url")));
+ if (auto ref = maybeGetStrAttr(attrs, "ref")) {
+ if (!std::regex_match(*ref, refRegex))
+ throw BadURL("invalid Mercurial branch/tag name '%s'", *ref);
+ input->ref = *ref;
+ }
+ if (auto rev = maybeGetStrAttr(attrs, "rev"))
+ input->rev = Hash(*rev, htSHA1);
+ return input;
+ }
+};
+
+static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<MercurialInputScheme>()); });
+
+}
diff --git a/src/libstore/fetchers/parse.cc b/src/libstore/fetchers/parse.cc
new file mode 100644
index 000000000..4f7cb3c6b
--- /dev/null
+++ b/src/libstore/fetchers/parse.cc
@@ -0,0 +1,138 @@
+#include "parse.hh"
+#include "util.hh"
+#include "regex.hh"
+
+namespace nix::fetchers {
+
+std::regex refRegex(refRegexS, std::regex::ECMAScript);
+std::regex revRegex(revRegexS, std::regex::ECMAScript);
+std::regex flakeIdRegex(flakeIdRegexS, std::regex::ECMAScript);
+
+ParsedURL parseURL(const std::string & url)
+{
+ static std::regex uriRegex(
+ "((" + schemeRegex + "):"
+ + "(?:(?://(" + authorityRegex + ")(" + absPathRegex + "))|(/?" + pathRegex + ")))"
+ + "(?:\\?(" + queryRegex + "))?"
+ + "(?:#(" + queryRegex + "))?",
+ std::regex::ECMAScript);
+
+ std::smatch match;
+
+ if (std::regex_match(url, match, uriRegex)) {
+ auto & base = match[1];
+ std::string scheme = match[2];
+ auto authority = match[3].matched
+ ? std::optional<std::string>(match[3]) : std::nullopt;
+ std::string path = match[4].matched ? match[4] : match[5];
+ auto & query = match[6];
+ auto & fragment = match[7];
+
+ auto isFile = scheme.find("file") != std::string::npos;
+
+ if (authority && *authority != "" && isFile)
+ throw Error("file:// URL '%s' has unexpected authority '%s'",
+ url, *authority);
+
+ if (isFile && path.empty())
+ path = "/";
+
+ return ParsedURL{
+ .url = url,
+ .base = base,
+ .scheme = scheme,
+ .authority = authority,
+ .path = path,
+ .query = decodeQuery(query),
+ .fragment = percentDecode(std::string(fragment))
+ };
+ }
+
+ else
+ throw BadURL("'%s' is not a valid URL", url);
+}
+
+std::string percentDecode(std::string_view in)
+{
+ std::string decoded;
+ for (size_t i = 0; i < in.size(); ) {
+ if (in[i] == '%') {
+ if (i + 2 >= in.size())
+ throw BadURL("invalid URI parameter '%s'", in);
+ try {
+ decoded += std::stoul(std::string(in, i + 1, 2), 0, 16);
+ i += 3;
+ } catch (...) {
+ throw BadURL("invalid URI parameter '%s'", in);
+ }
+ } else
+ decoded += in[i++];
+ }
+ return decoded;
+}
+
+std::map<std::string, std::string> decodeQuery(const std::string & query)
+{
+ std::map<std::string, std::string> result;
+
+ for (auto s : tokenizeString<Strings>(query, "&")) {
+ auto e = s.find('=');
+ if (e != std::string::npos)
+ result.emplace(
+ s.substr(0, e),
+ percentDecode(std::string_view(s).substr(e + 1)));
+ }
+
+ return result;
+}
+
+std::string percentEncode(std::string_view s)
+{
+ std::string res;
+ for (auto & c : s)
+ if ((c >= 'a' && c <= 'z')
+ || (c >= 'A' && c <= 'Z')
+ || (c >= '0' && c <= '9')
+ || strchr("-._~!$&'()*+,;=:@", c))
+ res += c;
+ else
+ res += fmt("%%%02x", (unsigned int) c);
+ return res;
+}
+
+std::string encodeQuery(const std::map<std::string, std::string> & ss)
+{
+ std::string res;
+ bool first = true;
+ for (auto & [name, value] : ss) {
+ if (!first) res += '&';
+ first = false;
+ res += percentEncode(name);
+ res += '=';
+ res += percentEncode(value);
+ }
+ return res;
+}
+
+std::string ParsedURL::to_string() const
+{
+ return
+ scheme
+ + ":"
+ + (authority ? "//" + *authority : "")
+ + path
+ + (query.empty() ? "" : "?" + encodeQuery(query))
+ + (fragment.empty() ? "" : "#" + percentEncode(fragment));
+}
+
+bool ParsedURL::operator ==(const ParsedURL & other) const
+{
+ return
+ scheme == other.scheme
+ && authority == other.authority
+ && path == other.path
+ && query == other.query
+ && fragment == other.fragment;
+}
+
+}
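A small sketch of the parser above; the URL is illustrative and only functions defined in this file are used:

#include "fetchers/parse.hh"
#include <cassert>
#include <iostream>

int main()
{
    using namespace nix::fetchers;

    auto url = parseURL("https://example.org/foo%20bar?ref=master#readme");
    assert(url.scheme == "https");
    assert(url.authority && *url.authority == "example.org");
    assert(url.path == "/foo%20bar");         // the path is kept as written
    assert(url.query.at("ref") == "master");  // query values are percent-decoded
    assert(url.fragment == "readme");         // so is the fragment

    // to_string() re-encodes the query and fragment.
    std::cout << url.to_string() << std::endl;
}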
diff --git a/src/libstore/fetchers/parse.hh b/src/libstore/fetchers/parse.hh
new file mode 100644
index 000000000..45d5182b0
--- /dev/null
+++ b/src/libstore/fetchers/parse.hh
@@ -0,0 +1,30 @@
+#pragma once
+
+#include "types.hh"
+
+namespace nix::fetchers {
+
+struct ParsedURL
+{
+ std::string url;
+ std::string base; // URL without query/fragment
+ std::string scheme;
+ std::optional<std::string> authority;
+ std::string path;
+ std::map<std::string, std::string> query;
+ std::string fragment;
+
+ std::string to_string() const;
+
+ bool operator ==(const ParsedURL & other) const;
+};
+
+MakeError(BadURL, Error);
+
+std::string percentDecode(std::string_view in);
+
+std::map<std::string, std::string> decodeQuery(const std::string & query);
+
+ParsedURL parseURL(const std::string & url);
+
+}
diff --git a/src/libstore/fetchers/regex.hh b/src/libstore/fetchers/regex.hh
new file mode 100644
index 000000000..e0989edfc
--- /dev/null
+++ b/src/libstore/fetchers/regex.hh
@@ -0,0 +1,37 @@
+#pragma once
+
+#include <regex>
+
+namespace nix::fetchers {
+
+// URI stuff.
+const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])";
+const static std::string schemeRegex = "(?:[a-z+]+)";
+const static std::string ipv6AddressRegex = "(?:\\[[0-9a-fA-F:]+\\])";
+const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])";
+const static std::string subdelimsRegex = "(?:[!$&'\"()*+,;=])";
+const static std::string hostnameRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + ")*)";
+const static std::string hostRegex = "(?:" + ipv6AddressRegex + "|" + hostnameRegex + ")";
+const static std::string userRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|:)*)";
+const static std::string authorityRegex = "(?:" + userRegex + "@)?" + hostRegex + "(?::[0-9]+)?";
+const static std::string pcharRegex = "(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|[:@])";
+const static std::string queryRegex = "(?:" + pcharRegex + "|[/? \"])*";
+const static std::string segmentRegex = "(?:" + pcharRegex + "+)";
+const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)";
+const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)";
+
+// A Git ref (i.e. branch or tag name).
+const static std::string refRegexS = "[a-zA-Z0-9][a-zA-Z0-9_.-]*"; // FIXME: check
+extern std::regex refRegex;
+
+// A Git revision (a SHA-1 commit hash).
+const static std::string revRegexS = "[0-9a-fA-F]{40}";
+extern std::regex revRegex;
+
+// A ref or revision, or a ref followed by a revision.
+const static std::string refAndOrRevRegex = "(?:(" + revRegexS + ")|(?:(" + refRegexS + ")(?:/(" + revRegexS + "))?))";
+
+const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*";
+extern std::regex flakeIdRegex;
+
+}
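The exported regexes can be used directly to validate user-supplied refs and revisions before constructing an input. A sketch (it links against libstore, which defines the extern regex objects in parse.cc):

#include "fetchers/regex.hh"
#include <iostream>
#include <string>

int main()
{
    using namespace nix::fetchers;

    std::cout << std::regex_match("release-19.09", refRegex) << "\n";      // 1
    std::cout << std::regex_match("..not-a-ref", refRegex) << "\n";        // 0
    std::cout << std::regex_match(std::string(40, 'a'), revRegex) << "\n"; // 1
}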
diff --git a/src/libstore/fetchers/registry.cc b/src/libstore/fetchers/registry.cc
new file mode 100644
index 000000000..1fd42a169
--- /dev/null
+++ b/src/libstore/fetchers/registry.cc
@@ -0,0 +1,167 @@
+#include "registry.hh"
+#include "util.hh"
+#include "fetchers.hh"
+#include "globals.hh"
+#include "download.hh"
+
+#include <nlohmann/json.hpp>
+
+namespace nix::fetchers {
+
+std::shared_ptr<Registry> Registry::read(
+ const Path & path, RegistryType type)
+{
+ auto registry = std::make_shared<Registry>(type);
+
+ if (!pathExists(path))
+        return registry;
+
+ auto json = nlohmann::json::parse(readFile(path));
+
+ auto version = json.value("version", 0);
+
+ // FIXME: remove soon
+ if (version == 1) {
+ auto flakes = json["flakes"];
+ for (auto i = flakes.begin(); i != flakes.end(); ++i) {
+ auto url = i->value("url", i->value("uri", ""));
+ if (url.empty())
+ throw Error("flake registry '%s' lacks a 'url' attribute for entry '%s'",
+ path, i.key());
+ registry->entries.push_back(
+ {inputFromURL(i.key()), inputFromURL(url)});
+ }
+ }
+
+ else if (version == 2) {
+ for (auto & i : json["flakes"])
+ registry->entries.push_back(
+ { inputFromAttrs(jsonToAttrs(i["from"]))
+ , inputFromAttrs(jsonToAttrs(i["to"]))
+ });
+ }
+
+ else
+ throw Error("flake registry '%s' has unsupported version %d", path, version);
+
+
+ return registry;
+}
+
+void Registry::write(const Path & path)
+{
+ nlohmann::json arr;
+ for (auto & elem : entries) {
+ nlohmann::json obj;
+ obj["from"] = attrsToJson(elem.first->toAttrs());
+ obj["to"] = attrsToJson(elem.second->toAttrs());
+ arr.emplace_back(std::move(obj));
+ }
+
+ nlohmann::json json;
+ json["version"] = 2;
+ json["flakes"] = std::move(arr);
+
+ createDirs(dirOf(path));
+ writeFile(path, json.dump(2));
+}
+
+void Registry::add(
+ const std::shared_ptr<const Input> & from,
+ const std::shared_ptr<const Input> & to)
+{
+ entries.emplace_back(from, to);
+}
+
+void Registry::remove(const std::shared_ptr<const Input> & input)
+{
+ // FIXME: use C++20 std::erase.
+ for (auto i = entries.begin(); i != entries.end(); )
+ if (*i->first == *input)
+ i = entries.erase(i);
+ else
+ ++i;
+}
+
+Path getUserRegistryPath()
+{
+ return getHome() + "/.config/nix/registry.json";
+}
+
+std::shared_ptr<Registry> getUserRegistry()
+{
+ return Registry::read(getUserRegistryPath(), Registry::User);
+}
+
+static std::shared_ptr<Registry> flagRegistry =
+ std::make_shared<Registry>(Registry::Flag);
+
+std::shared_ptr<Registry> getFlagRegistry()
+{
+ return flagRegistry;
+}
+
+void overrideRegistry(
+ const std::shared_ptr<const Input> & from,
+ const std::shared_ptr<const Input> & to)
+{
+ flagRegistry->add(from, to);
+}
+
+static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store)
+{
+ static auto reg = [&]() {
+ auto path = settings.flakeRegistry;
+
+ if (!hasPrefix(path, "/")) {
+ CachedDownloadRequest request(path);
+ request.name = "flake-registry.json";
+ request.gcRoot = true;
+ path = getDownloader()->downloadCached(store, request).path;
+ }
+
+ return Registry::read(path, Registry::Global);
+ }();
+
+ return reg;
+}
+
+Registries getRegistries(ref<Store> store)
+{
+ Registries registries;
+ registries.push_back(getFlagRegistry());
+ registries.push_back(getUserRegistry());
+ registries.push_back(getGlobalRegistry(store));
+ return registries;
+}
+
+std::shared_ptr<const Input> lookupInRegistries(
+ ref<Store> store,
+ std::shared_ptr<const Input> input)
+{
+ int n = 0;
+
+ restart:
+
+ n++;
+    if (n > 100) throw Error("cycle detected in flake registry for '%s'", input->to_string());
+
+ for (auto & registry : getRegistries(store)) {
+ // FIXME: O(n)
+ for (auto & entry : registry->entries) {
+ if (entry.first->contains(*input)) {
+ input = entry.second->applyOverrides(
+ !entry.first->getRef() && input->getRef() ? input->getRef() : std::optional<std::string>(),
+ !entry.first->getRev() && input->getRev() ? input->getRev() : std::optional<Hash>());
+ goto restart;
+ }
+ }
+ }
+
+ if (!input->isDirect())
+ throw Error("cannot find flake '%s' in the flake registries", input->to_string());
+
+ return input;
+}
+
+}
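A sketch of how the registry API is meant to be used. The flake URLs are illustrative, and inputFromURL() is assumed to be declared in fetchers.hh:

#include "fetchers/fetchers.hh"
#include "fetchers/registry.hh"

using namespace nix::fetchers;

void pinNixpkgs()
{
    // Map the indirect input 'flake:nixpkgs' to a concrete GitHub input
    // and persist it. Registry::write() emits the version-2 layout
    // parsed above, i.e.
    //   { "version": 2, "flakes": [ { "from": {...}, "to": {...} } ] }
    auto registry = getUserRegistry();
    registry->add(
        inputFromURL("flake:nixpkgs"),
        inputFromURL("github:NixOS/nixpkgs/release-19.09"));
    registry->write(getUserRegistryPath());
}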
diff --git a/src/libstore/fetchers/registry.hh b/src/libstore/fetchers/registry.hh
new file mode 100644
index 000000000..e29f78486
--- /dev/null
+++ b/src/libstore/fetchers/registry.hh
@@ -0,0 +1,55 @@
+#pragma once
+
+#include "types.hh"
+
+namespace nix { class Store; }
+
+namespace nix::fetchers {
+
+struct Input;
+
+struct Registry
+{
+ enum RegistryType {
+ Flag = 0,
+ User = 1,
+ Global = 2,
+ };
+
+ RegistryType type;
+
+ std::vector<std::pair<std::shared_ptr<const Input>, std::shared_ptr<const Input>>> entries;
+
+ Registry(RegistryType type)
+ : type(type)
+ { }
+
+ static std::shared_ptr<Registry> read(
+ const Path & path, RegistryType type);
+
+ void write(const Path & path);
+
+ void add(
+ const std::shared_ptr<const Input> & from,
+ const std::shared_ptr<const Input> & to);
+
+ void remove(const std::shared_ptr<const Input> & input);
+};
+
+typedef std::vector<std::shared_ptr<Registry>> Registries;
+
+std::shared_ptr<Registry> getUserRegistry();
+
+Path getUserRegistryPath();
+
+Registries getRegistries(ref<Store> store);
+
+void overrideRegistry(
+ const std::shared_ptr<const Input> & from,
+ const std::shared_ptr<const Input> & to);
+
+std::shared_ptr<const Input> lookupInRegistries(
+ ref<Store> store,
+ std::shared_ptr<const Input> input);
+
+}
diff --git a/src/libstore/fetchers/tarball.cc b/src/libstore/fetchers/tarball.cc
new file mode 100644
index 000000000..fc4d7542b
--- /dev/null
+++ b/src/libstore/fetchers/tarball.cc
@@ -0,0 +1,133 @@
+#include "fetchers.hh"
+#include "download.hh"
+#include "globals.hh"
+#include "parse.hh"
+#include "store-api.hh"
+
+namespace nix::fetchers {
+
+struct TarballInput : Input
+{
+ ParsedURL url;
+ std::optional<Hash> hash;
+
+ TarballInput(const ParsedURL & url) : url(url)
+ { }
+
+ std::string type() const override { return "tarball"; }
+
+ bool operator ==(const Input & other) const override
+ {
+ auto other2 = dynamic_cast<const TarballInput *>(&other);
+ return
+ other2
+ && to_string() == other2->to_string()
+ && hash == other2->hash;
+ }
+
+ bool isImmutable() const override
+ {
+ return hash || narHash;
+ }
+
+ std::string to_string() const override
+ {
+ auto url2(url);
+ // NAR hashes are preferred over file hashes since tar/zip files
+ // don't have a canonical representation.
+ if (narHash)
+ url2.query.insert_or_assign("narHash", narHash->to_string(SRI));
+ else if (hash)
+ url2.query.insert_or_assign("hash", hash->to_string(SRI));
+ return url2.to_string();
+ }
+
+ Attrs toAttrsInternal() const override
+ {
+ Attrs attrs;
+ attrs.emplace("url", url.to_string());
+ if (narHash)
+ attrs.emplace("narHash", hash->to_string(SRI));
+ else if (hash)
+ attrs.emplace("hash", hash->to_string(SRI));
+ return attrs;
+ }
+
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+ {
+ CachedDownloadRequest request(url.to_string());
+ request.unpack = true;
+ request.getLastModified = true;
+ request.name = "source";
+
+ auto res = getDownloader()->downloadCached(store, request);
+
+ auto input = std::make_shared<TarballInput>(*this);
+
+ auto storePath = store->parseStorePath(res.storePath);
+
+ input->narHash = store->queryPathInfo(storePath)->narHash;
+
+ return {
+ Tree {
+ .actualPath = res.path,
+ .storePath = std::move(storePath),
+ .info = TreeInfo {
+ .lastModified = *res.lastModified,
+ },
+ },
+ input
+ };
+ }
+};
+
+struct TarballInputScheme : InputScheme
+{
+ std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
+ {
+ if (url.scheme != "file" && url.scheme != "http" && url.scheme != "https") return nullptr;
+
+ if (!hasSuffix(url.path, ".zip")
+ && !hasSuffix(url.path, ".tar")
+ && !hasSuffix(url.path, ".tar.gz")
+ && !hasSuffix(url.path, ".tar.xz")
+ && !hasSuffix(url.path, ".tar.bz2"))
+ return nullptr;
+
+ auto input = std::make_unique<TarballInput>(url);
+
+ auto hash = url.query.find("hash");
+ if (hash != url.query.end())
+ // FIXME: require SRI hash.
+ input->hash = Hash(hash->second);
+
+ auto narHash = url.query.find("narHash");
+ if (narHash != url.query.end())
+ // FIXME: require SRI hash.
+ input->narHash = Hash(narHash->second);
+
+ return input;
+ }
+
+ std::unique_ptr<Input> inputFromAttrs(const Input::Attrs & attrs) override
+ {
+ if (maybeGetStrAttr(attrs, "type") != "tarball") return {};
+
+ for (auto & [name, value] : attrs)
+ if (name != "type" && name != "url" && name != "hash" && name != "narHash")
+ throw Error("unsupported tarball input attribute '%s'", name);
+
+ auto input = std::make_unique<TarballInput>(parseURL(getStrAttr(attrs, "url")));
+ if (auto hash = maybeGetStrAttr(attrs, "hash"))
+ // FIXME: require SRI hash.
+ input->hash = Hash(*hash);
+ if (auto narHash = maybeGetStrAttr(attrs, "narHash"))
+ // FIXME: require SRI hash.
+ input->narHash = Hash(*narHash);
+ return input;
+ }
+};
+
+static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<TarballInputScheme>()); });
+
+}
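A sketch of what the tarball scheme accepts; the URL is made up:

#include "fetchers/fetchers.hh"
#include <iostream>

int main()
{
    // Any file/http(s) URL ending in .zip/.tar/.tar.{gz,xz,bz2} is a
    // tarball input. Adding a '?narHash=<SRI hash>' (or '?hash=...')
    // query parameter pins its contents and makes the input immutable.
    auto input = nix::fetchers::inputFromURL("https://example.org/source.tar.gz");
    std::cout << input->to_string() << std::endl;
}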
diff --git a/src/libstore/fetchers/tree-info.hh b/src/libstore/fetchers/tree-info.hh
new file mode 100644
index 000000000..02e92759b
--- /dev/null
+++ b/src/libstore/fetchers/tree-info.hh
@@ -0,0 +1,26 @@
+#pragma once
+
+#include "path.hh"
+
+namespace nix { class Store; }
+
+namespace nix::fetchers {
+
+struct TreeInfo
+{
+ Hash narHash;
+ std::optional<uint64_t> revCount;
+ std::optional<time_t> lastModified;
+
+ bool operator ==(const TreeInfo & other) const
+ {
+ return
+ narHash == other.narHash
+ && revCount == other.revCount
+ && lastModified == other.lastModified;
+ }
+
+ StorePath computeStorePath(Store & store) const;
+};
+
+}
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 782870547..d0500be22 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -356,12 +356,24 @@ public:
Setting<Paths> pluginFiles{this, {}, "plugin-files",
"Plugins to dynamically load at nix initialization time."};
+ Setting<std::string> githubAccessToken{this, "", "github-access-token",
+ "GitHub access token to get access to GitHub data through the GitHub API for github:<..> flakes."};
+
Setting<Strings> experimentalFeatures{this, {}, "experimental-features",
"Experimental Nix features to enable."};
bool isExperimentalFeatureEnabled(const std::string & name);
void requireExperimentalFeature(const std::string & name);
+
+ Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry",
+ "Path or URI of the global flake registry."};
+
+ Setting<bool> allowDirty{this, true, "allow-dirty",
+ "Whether to allow dirty Git/Mercurial trees."};
+
+ Setting<bool> warnDirty{this, true, "warn-dirty",
+ "Whether to warn about dirty Git/Mercurial trees."};
};
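The new options are ordinary nix.conf settings; the values below are only illustrative:

flake-registry = /etc/nix/flake-registry.json
allow-dirty = false
warn-dirty = false
github-access-token = <your token>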
diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index d4ae36662..011794c62 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -163,10 +163,11 @@ static RegisterStoreImplementation regStore([](
const std::string & uri, const Store::Params & params)
-> std::shared_ptr<Store>
{
+ static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1";
if (std::string(uri, 0, 7) != "http://" &&
std::string(uri, 0, 8) != "https://" &&
- (getEnv("_NIX_FORCE_HTTP_BINARY_CACHE_STORE") != "1" || std::string(uri, 0, 7) != "file://")
- ) return 0;
+ (!forceHttp || std::string(uri, 0, 7) != "file://"))
+ return 0;
auto store = std::make_shared<HttpBinaryCacheStore>(params, uri);
store->init();
return store;
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index b254d766a..a337ad0cc 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -298,9 +298,7 @@ void LocalStore::openDB(State & state, bool create)
/* Open the Nix database. */
string dbPath = dbDir + "/db.sqlite";
auto & db(state.db);
- if (sqlite3_open_v2(dbPath.c_str(), &db.db,
- SQLITE_OPEN_READWRITE | (create ? SQLITE_OPEN_CREATE : 0), 0) != SQLITE_OK)
- throw Error(format("cannot open Nix database '%1%'") % dbPath);
+ state.db = SQLite(dbPath, create);
#ifdef __CYGWIN__
/* The cygwin version of sqlite3 has a patch which calls
@@ -312,11 +310,6 @@ void LocalStore::openDB(State & state, bool create)
SetDllDirectoryW(L"");
#endif
- if (sqlite3_busy_timeout(db, 60 * 60 * 1000) != SQLITE_OK)
- throwSQLiteError(db, "setting timeout");
-
- db.exec("pragma foreign_keys = 1");
-
/* !!! check whether sqlite has been built with foreign key
support */
@@ -1270,7 +1263,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
else
hashSink = std::make_unique<HashModuloSink>(info->narHash.type, storePathToHash(printStorePath(info->path)));
- dumpPath(toRealPath(printStorePath(i)), *hashSink);
+ dumpPath(Store::toRealPath(i), *hashSink);
auto current = hashSink->finish();
if (info->narHash != nullHash && info->narHash != current.first) {
diff --git a/src/libstore/local.mk b/src/libstore/local.mk
index ac68c2342..e8cbe422c 100644
--- a/src/libstore/local.mk
+++ b/src/libstore/local.mk
@@ -4,7 +4,7 @@ libstore_NAME = libnixstore
libstore_DIR := $(d)
-libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc)
+libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/fetchers/*.cc)
libstore_LIBS = libutil libnixrust
@@ -60,3 +60,9 @@ $(d)/build.cc:
clean-files += $(d)/schema.sql.gen.hh
$(eval $(call install-file-in, $(d)/nix-store.pc, $(prefix)/lib/pkgconfig, 0644))
+
+$(foreach i, $(wildcard src/libstore/builtins/*.hh), \
+ $(eval $(call install-file-in, $(i), $(includedir)/nix/builtins, 0644)))
+
+$(foreach i, $(wildcard src/libstore/fetchers/*.hh), \
+ $(eval $(call install-file-in, $(i), $(includedir)/nix/fetchers, 0644)))
diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc
index 907645d86..442541330 100644
--- a/src/libstore/nar-info-disk-cache.cc
+++ b/src/libstore/nar-info-disk-cache.cc
@@ -78,12 +78,7 @@ public:
state->db = SQLite(dbPath);
- if (sqlite3_busy_timeout(state->db, 60 * 60 * 1000) != SQLITE_OK)
- throwSQLiteError(state->db, "setting timeout");
-
- // We can always reproduce the cache.
- state->db.exec("pragma synchronous = off");
- state->db.exec("pragma main.journal_mode = truncate");
+ state->db.isCache();
state->db.exec(schema);
diff --git a/src/libstore/nar-info-disk-cache.hh b/src/libstore/nar-info-disk-cache.hh
index 878acbb87..04de2c5eb 100644
--- a/src/libstore/nar-info-disk-cache.hh
+++ b/src/libstore/nar-info-disk-cache.hh
@@ -10,7 +10,7 @@ class NarInfoDiskCache
public:
typedef enum { oValid, oInvalid, oUnknown } Outcome;
- virtual ~NarInfoDiskCache() { };
+ virtual ~NarInfoDiskCache() { }
virtual void createCache(const std::string & uri, const Path & storeDir,
bool wantMassQuery, int priority) = 0;
diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc
index d0f289a0f..45c033c66 100644
--- a/src/libstore/parsed-derivations.cc
+++ b/src/libstore/parsed-derivations.cc
@@ -1,5 +1,7 @@
#include "parsed-derivations.hh"
+#include <nlohmann/json.hpp>
+
namespace nix {
ParsedDerivation::ParsedDerivation(StorePath && drvPath, BasicDerivation & drv)
@@ -9,13 +11,15 @@ ParsedDerivation::ParsedDerivation(StorePath && drvPath, BasicDerivation & drv)
auto jsonAttr = drv.env.find("__json");
if (jsonAttr != drv.env.end()) {
try {
- structuredAttrs = nlohmann::json::parse(jsonAttr->second);
+ structuredAttrs = std::make_unique<nlohmann::json>(nlohmann::json::parse(jsonAttr->second));
} catch (std::exception & e) {
throw Error("cannot process __json attribute of '%s': %s", drvPath.to_string(), e.what());
}
}
}
+ParsedDerivation::~ParsedDerivation() { }
+
std::optional<std::string> ParsedDerivation::getStringAttr(const std::string & name) const
{
if (structuredAttrs) {
diff --git a/src/libstore/parsed-derivations.hh b/src/libstore/parsed-derivations.hh
index cec868754..f4df5dd54 100644
--- a/src/libstore/parsed-derivations.hh
+++ b/src/libstore/parsed-derivations.hh
@@ -1,6 +1,6 @@
#include "derivations.hh"
-#include <nlohmann/json.hpp>
+#include <nlohmann/json_fwd.hpp>
namespace nix {
@@ -8,15 +8,17 @@ class ParsedDerivation
{
StorePath drvPath;
BasicDerivation & drv;
- std::optional<nlohmann::json> structuredAttrs;
+ std::unique_ptr<nlohmann::json> structuredAttrs;
public:
ParsedDerivation(StorePath && drvPath, BasicDerivation & drv);
- const std::optional<nlohmann::json> & getStructuredAttrs() const
+ ~ParsedDerivation();
+
+ const nlohmann::json * getStructuredAttrs() const
{
- return structuredAttrs;
+ return structuredAttrs.get();
}
std::optional<std::string> getStringAttr(const std::string & name) const;
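The parsed-derivations change is the usual forward-declaration pattern: with only json_fwd.hpp visible, the nlohmann::json member must live behind a pointer, and the destructor has to be defined in the .cc file where the full type is complete. A generic sketch (Widget is hypothetical):

// widget.hh
#include <memory>
#include <nlohmann/json_fwd.hpp>

class Widget {
    std::unique_ptr<nlohmann::json> data;
public:
    Widget();
    ~Widget();              // defined out of line, see widget.cc
};

// widget.cc
#include "widget.hh"
#include <nlohmann/json.hpp>

Widget::Widget() : data(std::make_unique<nlohmann::json>()) { }
Widget::~Widget() { }       // nlohmann::json is complete here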
diff --git a/src/libstore/profiles.cc b/src/libstore/profiles.cc
index dae3f2d32..2bef51878 100644
--- a/src/libstore/profiles.cc
+++ b/src/libstore/profiles.cc
@@ -256,4 +256,22 @@ string optimisticLockProfile(const Path & profile)
}
+Path getDefaultProfile()
+{
+ Path profileLink = getHome() + "/.nix-profile";
+ try {
+ if (!pathExists(profileLink)) {
+ replaceSymlink(
+ getuid() == 0
+ ? settings.nixStateDir + "/profiles/default"
+ : fmt("%s/profiles/per-user/%s/profile", settings.nixStateDir, getUserName()),
+ profileLink);
+ }
+ return absPath(readLink(profileLink), dirOf(profileLink));
+ } catch (Error &) {
+ return profileLink;
+ }
+}
+
+
}
diff --git a/src/libstore/profiles.hh b/src/libstore/profiles.hh
index 5fa1533de..78645d8b6 100644
--- a/src/libstore/profiles.hh
+++ b/src/libstore/profiles.hh
@@ -64,4 +64,8 @@ void lockProfile(PathLocks & lock, const Path & profile);
rebuilt. */
string optimisticLockProfile(const Path & profile);
+/* Resolve ~/.nix-profile. If ~/.nix-profile doesn't exist yet, create
+ it. */
+Path getDefaultProfile();
+
}
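Sketch of the new helper; per the implementation above it falls back to the symlink path itself if the link cannot be created or resolved:

#include "profiles.hh"
#include <iostream>

int main()
{
    // Prints the resolved target of ~/.nix-profile, creating the
    // symlink first if it does not exist yet.
    std::cout << nix::getDefaultProfile() << std::endl;
}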
diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc
index a061d64f3..eb1daafc5 100644
--- a/src/libstore/sqlite.cc
+++ b/src/libstore/sqlite.cc
@@ -25,11 +25,16 @@ namespace nix {
throw SQLiteError("%s: %s (in '%s')", fs.s, sqlite3_errstr(exterr), path);
}
-SQLite::SQLite(const Path & path)
+SQLite::SQLite(const Path & path, bool create)
{
if (sqlite3_open_v2(path.c_str(), &db,
- SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE, 0) != SQLITE_OK)
+ SQLITE_OPEN_READWRITE | (create ? SQLITE_OPEN_CREATE : 0), 0) != SQLITE_OK)
throw Error(format("cannot open SQLite database '%s'") % path);
+
+ if (sqlite3_busy_timeout(db, 60 * 60 * 1000) != SQLITE_OK)
+ throwSQLiteError(db, "setting timeout");
+
+ exec("pragma foreign_keys = 1");
}
SQLite::~SQLite()
@@ -42,6 +47,12 @@ SQLite::~SQLite()
}
}
+void SQLite::isCache()
+{
+ exec("pragma synchronous = off");
+ exec("pragma main.journal_mode = truncate");
+}
+
void SQLite::exec(const std::string & stmt)
{
retrySQLite<void>([&]() {
@@ -94,6 +105,16 @@ SQLiteStmt::Use & SQLiteStmt::Use::operator () (const std::string & value, bool
return *this;
}
+SQLiteStmt::Use & SQLiteStmt::Use::operator () (const unsigned char * data, size_t len, bool notNull)
+{
+ if (notNull) {
+ if (sqlite3_bind_blob(stmt, curArg++, data, len, SQLITE_TRANSIENT) != SQLITE_OK)
+ throwSQLiteError(stmt.db, "binding argument");
+ } else
+ bind();
+ return *this;
+}
+
SQLiteStmt::Use & SQLiteStmt::Use::operator () (int64_t value, bool notNull)
{
if (notNull) {
diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh
index bd012d9b9..fd04c9b07 100644
--- a/src/libstore/sqlite.hh
+++ b/src/libstore/sqlite.hh
@@ -5,8 +5,8 @@
#include "types.hh"
-class sqlite3;
-class sqlite3_stmt;
+struct sqlite3;
+struct sqlite3_stmt;
namespace nix {
@@ -15,13 +15,16 @@ struct SQLite
{
sqlite3 * db = 0;
SQLite() { }
- SQLite(const Path & path);
+ SQLite(const Path & path, bool create = true);
SQLite(const SQLite & from) = delete;
SQLite& operator = (const SQLite & from) = delete;
SQLite& operator = (SQLite && from) { db = from.db; from.db = 0; return *this; }
~SQLite();
operator sqlite3 * () { return db; }
+ /* Disable synchronous mode, set truncate journal mode. */
+ void isCache();
+
void exec(const std::string & stmt);
};
@@ -52,6 +55,7 @@ struct SQLiteStmt
/* Bind the next parameter. */
Use & operator () (const std::string & value, bool notNull = true);
+ Use & operator () (const unsigned char * data, size_t len, bool notNull = true);
Use & operator () (int64_t value, bool notNull = true);
Use & bind(); // null
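A sketch of the new blob overload. The table is made up, and SQLiteStmt::create()/use()/exec() are the existing statement helpers declared elsewhere in this header:

#include "sqlite.hh"
#include <string>
#include <vector>

using namespace nix;

void storeBlob(SQLite & db, const std::string & name,
    const std::vector<unsigned char> & blob)
{
    SQLiteStmt insert;
    insert.create(db, "insert or replace into Blobs (name, data) values (?, ?)");
    // The second bound argument uses the new (data, len) overload added above.
    insert.use()(name)(blob.data(), blob.size()).exec();
}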
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index d8f6c22bc..e37829b17 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -6,6 +6,7 @@
#include "thread-pool.hh"
#include "json.hh"
#include "derivations.hh"
+#include "fetchers/parse.hh"
#include <future>
@@ -48,7 +49,7 @@ Path Store::followLinksToStore(std::string_view _path) const
path = absPath(target, dirOf(path));
}
if (!isInStore(path))
- throw Error(format("path '%1%' is not in the Nix store") % path);
+ throw NotInStore("path '%1%' is not in the Nix store", path);
return path;
}
@@ -741,12 +742,7 @@ std::string Store::showPaths(const StorePathSet & paths)
string showPaths(const PathSet & paths)
{
- string s;
- for (auto & i : paths) {
- if (s.size() != 0) s += ", ";
- s += "'" + i + "'";
- }
- return s;
+ return concatStringsSep(", ", quoteStrings(paths));
}
@@ -869,27 +865,7 @@ std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri_
Store::Params params;
auto q = uri.find('?');
if (q != std::string::npos) {
- for (auto s : tokenizeString<Strings>(uri.substr(q + 1), "&")) {
- auto e = s.find('=');
- if (e != std::string::npos) {
- auto value = s.substr(e + 1);
- std::string decoded;
- for (size_t i = 0; i < value.size(); ) {
- if (value[i] == '%') {
- if (i + 2 >= value.size())
- throw Error("invalid URI parameter '%s'", value);
- try {
- decoded += std::stoul(std::string(value, i + 1, 2), 0, 16);
- i += 3;
- } catch (...) {
- throw Error("invalid URI parameter '%s'", value);
- }
- } else
- decoded += value[i++];
- }
- params[s.substr(0, e)] = decoded;
- }
- }
+ params = fetchers::decodeQuery(uri.substr(q + 1));
uri = uri_.substr(0, q);
}
return {uri, params};
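Store URI query strings now go through fetchers::decodeQuery, so percent-encoded values are handled the same way as in flake URLs. An illustrative call, assuming splitUriAndParams is exported by store-api.hh as in the existing code:

#include "store-api.hh"

void example()
{
    auto [uri, params] = nix::splitUriAndParams(
        "ssh://backup-host?remote-program=/opt/nix/bin/nix-store");
    // uri    == "ssh://backup-host"
    // params == { { "remote-program", "/opt/nix/bin/nix-store" } }
}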
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 861b96930..0aaec20d1 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -27,6 +27,7 @@ MakeError(InvalidPath, Error);
MakeError(Unsupported, Error);
MakeError(SubstituteGone, Error);
MakeError(SubstituterDisabled, Error);
+MakeError(NotInStore, Error);
struct BasicDerivation;
@@ -655,6 +656,11 @@ public:
return storePath;
}
+ Path toRealPath(const StorePath & storePath)
+ {
+ return toRealPath(printStorePath(storePath));
+ }
+
virtual void createUser(const std::string & userName, uid_t userId)
{ }