aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--doc/manual/expressions/builtins.xml27
-rw-r--r--doc/manual/release-notes/rl-1.12.xml7
-rw-r--r--mk/tests.mk16
-rw-r--r--release.nix9
-rw-r--r--shell.nix4
-rw-r--r--src/libexpr/common-eval-args.hh2
-rw-r--r--src/libexpr/parser.y7
-rw-r--r--src/libexpr/primops.cc46
-rw-r--r--src/libexpr/primops/fetchGit.cc (renamed from src/libexpr/primops/fetchgit.cc)99
-rw-r--r--src/libexpr/primops/fetchMercurial.cc202
-rw-r--r--src/libexpr/primops/fetchgit.hh23
-rw-r--r--src/libmain/common-args.cc4
-rw-r--r--src/libmain/shared.cc1
-rw-r--r--src/libmain/shared.hh1
-rw-r--r--src/libstore/binary-cache-store.cc82
-rw-r--r--src/libstore/binary-cache-store.hh5
-rw-r--r--src/libstore/build.cc12
-rw-r--r--src/libstore/download.cc4
-rw-r--r--src/libstore/nar-accessor.cc37
-rw-r--r--src/libstore/nar-accessor.hh7
-rw-r--r--src/libstore/s3-binary-cache-store.cc19
-rw-r--r--src/libstore/s3.hh2
-rw-r--r--src/libstore/sandbox-defaults.sb35
-rw-r--r--src/libstore/store-api.cc14
-rw-r--r--src/libutil/archive.cc2
-rw-r--r--src/libutil/archive.hh7
-rw-r--r--src/libutil/hash.hh2
-rw-r--r--src/libutil/util.cc41
-rw-r--r--src/libutil/util.hh23
-rw-r--r--src/nix/ls.cc21
-rw-r--r--src/nix/main.cc4
-rw-r--r--src/nix/path-info.cc2
-rw-r--r--src/nix/search.cc2
-rw-r--r--src/nix/show-config.cc2
-rw-r--r--tests/common.sh.in11
-rw-r--r--tests/fetchGit.sh86
-rw-r--r--tests/fetchMercurial.sh84
-rw-r--r--tests/local.mk7
-rw-r--r--tests/nar-access.nix (renamed from tests/nar-index.nix)0
-rw-r--r--tests/nar-access.sh38
-rw-r--r--tests/nar-index.sh23
-rw-r--r--tests/signing.sh68
42 files changed, 851 insertions, 237 deletions
diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml
index 615314880..5e88b8856 100644
--- a/doc/manual/expressions/builtins.xml
+++ b/doc/manual/expressions/builtins.xml
@@ -264,13 +264,13 @@ stdenv.mkDerivation { … }
Note that when obtaining the hash with <varname>nix-prefetch-url
</varname> the option <varname>--unpack</varname> is required.
- </para>
-
- <para>This function can also verify the contents against a hash.
- In that case, the function takes a set instead of a URL. The set
+ </para>
+
+ <para>This function can also verify the contents against a hash.
+ In that case, the function takes a set instead of a URL. The set
requires the attribute <varname>url</varname> and the attribute
<varname>sha256</varname>, e.g.
-
+
<programlisting>
with import (fetchTarball {
url = https://github.com/NixOS/nixpkgs-channels/archive/nixos-14.12.tar.gz;
@@ -279,7 +279,7 @@ with import (fetchTarball {
stdenv.mkDerivation { … }
</programlisting>
-
+
</para>
<para>This function is not available if <link
@@ -600,6 +600,16 @@ x: x + 456</programlisting>
</varlistentry>
+ <varlistentry><term><function>builtins.isFloat</function>
+ <replaceable>e</replaceable></term>
+
+ <listitem><para>Return <literal>true</literal> if
+ <replaceable>e</replaceable> evaluates to a float, and
+ <literal>false</literal> otherwise.</para></listitem>
+
+ </varlistentry>
+
+
<varlistentry><term><function>builtins.isBool</function>
<replaceable>e</replaceable></term>
@@ -1245,8 +1255,9 @@ stdenv.mkDerivation (rec {
<replaceable>e</replaceable>, namely <literal>"int"</literal>,
<literal>"bool"</literal>, <literal>"string"</literal>,
<literal>"path"</literal>, <literal>"null"</literal>,
- <literal>"set"</literal>, <literal>"list"</literal> or
- <literal>"lambda"</literal>.</para></listitem>
+ <literal>"set"</literal>, <literal>"list"</literal>,
+ <literal>"lambda"</literal> or
+ <literal>"float"</literal>.</para></listitem>
</varlistentry>
diff --git a/doc/manual/release-notes/rl-1.12.xml b/doc/manual/release-notes/rl-1.12.xml
index 7c9a8b75e..29943e3e6 100644
--- a/doc/manual/release-notes/rl-1.12.xml
+++ b/doc/manual/release-notes/rl-1.12.xml
@@ -189,13 +189,6 @@
</listitem>
<listitem>
- <para>Git repositories can now be specified in the Nix search
- path,
- e.g. <literal>nixpkgs=git://github.com/NixOS/nixpkgs</literal>.
- (d8bf0d4859e28ddd23401fbe89f4e528aa09ddb3)</para>
- </listitem>
-
- <listitem>
<para><literal>&lt;nix/fetchurl.nix&gt;</literal> now uses the
content-addressable tarball cache at
<uri>http://tarballs.nixos.org/</uri>, just like
diff --git a/mk/tests.mk b/mk/tests.mk
index 1138857c3..e353d46a0 100644
--- a/mk/tests.mk
+++ b/mk/tests.mk
@@ -12,21 +12,23 @@ installcheck:
@total=0; failed=0; \
red=""; \
green=""; \
+ yellow=""; \
normal=""; \
if [ -t 1 ]; then \
- ncolors="$$(tput colors)"; \
- if [ -n "$$ncolors" ] && [ "$$ncolors" -ge 8 ]; then \
- red="$$(tput setaf 1)"; \
- green="$$(tput setaf 2)"; \
- normal="$$(tput sgr0)"; \
- fi; \
+ red=""; \
+ green=""; \
+ yellow=""; \
+ normal=""; \
fi; \
for i in $(_installcheck-list); do \
total=$$((total + 1)); \
printf "running test $$i..."; \
log="$$(cd $$(dirname $$i) && $(tests-environment) $$(basename $$i) 2>&1)"; \
- if [ $$? -eq 0 ]; then \
+ status=$$?; \
+ if [ $$status -eq 0 ]; then \
echo " [$${green}PASS$$normal]"; \
+ elif [ $$status -eq 99 ]; then \
+ echo " [$${yellow}SKIP$$normal]"; \
else \
echo " [$${red}FAIL$$normal]"; \
echo "$$log" | sed 's/^/ /'; \
diff --git a/release.nix b/release.nix
index a98199258..c76966a1a 100644
--- a/release.nix
+++ b/release.nix
@@ -77,6 +77,9 @@ let
bzip2 xz brotli
openssl pkgconfig sqlite boehmgc
+ # Tests
+ git
+ mercurial
]
++ lib.optional stdenv.isLinux libseccomp
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
@@ -182,7 +185,7 @@ let
buildInputs =
[ curl bzip2 openssl pkgconfig sqlite xz libsodium libseccomp
# These are for "make check" only:
- graphviz libxml2 libxslt
+ graphviz libxml2 libxslt git mercurial
];
configureFlags = ''
@@ -193,7 +196,7 @@ let
doInstallCheck = true;
- lcovFilter = [ "*/boost/*" "*-tab.*" ];
+ lcovFilter = [ "*/boost/*" "*-tab.*" "*/nlohmann/*" "*/linenoise/*" ];
# We call `dot', and even though we just use it to
# syntax-check generated dot files, it still requires some
@@ -332,7 +335,7 @@ let
src = jobs.tarball;
diskImage = (diskImageFun vmTools.diskImageFuns)
{ extraPackages =
- [ "libsqlite3-dev" "libbz2-dev" "libcurl-dev" "libcurl3-nss" "libssl-dev" "liblzma-dev" "libseccomp-dev" "ncurses-bin" ]
+ [ "libsqlite3-dev" "libbz2-dev" "libcurl-dev" "libcurl3-nss" "libssl-dev" "liblzma-dev" "libseccomp-dev" ]
++ extraPackages; };
memSize = 1024;
meta.schedulingPriority = 50;
diff --git a/shell.nix b/shell.nix
index 2b4d0dd20..f47952582 100644
--- a/shell.nix
+++ b/shell.nix
@@ -23,6 +23,10 @@ with import ./release-common.nix { inherit pkgs; };
# For nix-perl
perl
perlPackages.DBDSQLite
+
+ # Tests
+ git
+ mercurial
]
++ lib.optional stdenv.isLinux libseccomp;
diff --git a/src/libexpr/common-eval-args.hh b/src/libexpr/common-eval-args.hh
index 09fa406b2..be7fda783 100644
--- a/src/libexpr/common-eval-args.hh
+++ b/src/libexpr/common-eval-args.hh
@@ -6,7 +6,7 @@ namespace nix {
class Store;
class EvalState;
-struct Bindings;
+class Bindings;
struct MixEvalArgs : virtual Args
{
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index eee315228..ef11dd609 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -523,7 +523,6 @@ formal
#include "eval.hh"
#include "download.hh"
#include "store-api.hh"
-#include "primops/fetchgit.hh"
namespace nix {
@@ -665,11 +664,7 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
if (isUri(elem.second)) {
try {
- if (hasPrefix(elem.second, "git://") || hasSuffix(elem.second, ".git"))
- // FIXME: support specifying revision/branch
- res = { true, exportGit(store, elem.second, "master").storePath };
- else
- res = { true, getDownloader()->downloadCached(store, elem.second, true) };
+ res = { true, getDownloader()->downloadCached(store, elem.second, true) };
} catch (DownloadError & e) {
printError(format("warning: Nix search path entry '%1%' cannot be downloaded, ignoring") % elem.second);
res = { false, "" };
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index cd0dfbc03..e3b5dfb42 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -1009,22 +1009,21 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
}
-struct FilterFromExpr : PathFilter
+static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- EvalState & state;
- Value & filter;
- Pos pos;
+ PathSet context;
+ Path path = state.coerceToPath(pos, *args[1], context);
+ if (!context.empty())
+ throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % pos);
- FilterFromExpr(EvalState & state, Value & filter, const Pos & pos)
- : state(state), filter(filter), pos(pos)
- {
- }
+ state.forceValue(*args[0]);
+ if (args[0]->type != tLambda)
+ throw TypeError(format("first argument in call to 'filterSource' is not a function but %1%, at %2%") % showType(*args[0]) % pos);
- bool operator () (const Path & path)
- {
- struct stat st;
- if (lstat(path.c_str(), &st))
- throw SysError(format("getting attributes of path '%1%'") % path);
+ path = state.checkSourcePath(path);
+
+ PathFilter filter = [&](const Path & path) {
+ auto st = lstat(path);
/* Call the filter function. The first argument is the path,
the second is a string indicating the type of the file. */
@@ -1032,7 +1031,7 @@ struct FilterFromExpr : PathFilter
mkString(arg1, path);
Value fun2;
- state.callFunction(filter, arg1, fun2, noPos);
+ state.callFunction(*args[0], arg1, fun2, noPos);
Value arg2;
mkString(arg2,
@@ -1045,24 +1044,7 @@ struct FilterFromExpr : PathFilter
state.callFunction(fun2, arg2, res, noPos);
return state.forceBool(res, pos);
- }
-};
-
-
-static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args, Value & v)
-{
- PathSet context;
- Path path = state.coerceToPath(pos, *args[1], context);
- if (!context.empty())
- throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % pos);
-
- state.forceValue(*args[0]);
- if (args[0]->type != tLambda)
- throw TypeError(format("first argument in call to 'filterSource' is not a function but %1%, at %2%") % showType(*args[0]) % pos);
-
- FilterFromExpr filter(state, *args[0], pos);
-
- path = state.checkSourcePath(path);
+ };
Path dstPath = settings.readOnlyMode
? state.store->computeStorePathForPath(path, true, htSHA256, filter).first
diff --git a/src/libexpr/primops/fetchgit.cc b/src/libexpr/primops/fetchGit.cc
index 4af530124..bca68ed72 100644
--- a/src/libexpr/primops/fetchgit.cc
+++ b/src/libexpr/primops/fetchGit.cc
@@ -1,4 +1,3 @@
-#include "fetchgit.hh"
#include "primops.hh"
#include "eval-inline.hh"
#include "download.hh"
@@ -15,10 +14,63 @@ using namespace std::string_literals;
namespace nix {
+struct GitInfo
+{
+ Path storePath;
+ std::string rev;
+ std::string shortRev;
+ uint64_t revCount = 0;
+};
+
GitInfo exportGit(ref<Store> store, const std::string & uri,
- const std::string & ref, const std::string & rev,
+ std::experimental::optional<std::string> ref, const std::string & rev,
const std::string & name)
{
+ if (!ref && rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.git")) {
+
+ bool clean = true;
+
+ try {
+ runProgram("git", true, { "-C", uri, "diff-index", "--quiet", "HEAD", "--" });
+ } catch (ExecError e) {
+ if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
+ clean = false;
+ }
+
+ if (!clean) {
+
+ /* This is an unclean working tree. So copy all tracked
+ files. */
+
+ GitInfo gitInfo;
+ gitInfo.rev = "0000000000000000000000000000000000000000";
+ gitInfo.shortRev = std::string(gitInfo.rev, 0, 7);
+
+ auto files = tokenizeString<std::set<std::string>>(
+ runProgram("git", true, { "-C", uri, "ls-files", "-z" }), "\0"s);
+
+ PathFilter filter = [&](const Path & p) -> bool {
+ assert(hasPrefix(p, uri));
+ std::string file(p, uri.size() + 1);
+
+ auto st = lstat(p);
+
+ if (S_ISDIR(st.st_mode)) {
+ auto i = files.lower_bound(file);
+ return i != files.end() && hasPrefix(*i, file);
+ }
+
+ return files.count(file);
+ };
+
+ gitInfo.storePath = store->addToStore("source", uri, true, htSHA256, filter);
+
+ return gitInfo;
+ }
+ }
+
+ if (!ref) ref = "master"s;
+
if (rev != "") {
std::regex revRegex("^[0-9a-fA-F]{40}$");
if (!std::regex_match(rev, revRegex))
@@ -32,7 +84,7 @@ GitInfo exportGit(ref<Store> store, const std::string & uri,
runProgram("git", true, { "init", "--bare", cacheDir });
}
- std::string localRef = hashString(htSHA256, fmt("%s-%s", uri, ref)).to_string(Base32, false);
+ std::string localRef = hashString(htSHA256, fmt("%s-%s", uri, *ref)).to_string(Base32, false);
Path localRefFile = cacheDir + "/refs/heads/" + localRef;
@@ -41,21 +93,27 @@ GitInfo exportGit(ref<Store> store, const std::string & uri,
time_t now = time(0);
struct stat st;
if (stat(localRefFile.c_str(), &st) != 0 ||
- st.st_mtime < now - settings.tarballTtl)
+ st.st_mtime <= now - settings.tarballTtl)
{
- Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", uri));
-
- // FIXME: git stderr messes up our progress indicator, so
- // we're using --quiet for now. Should process its stderr.
- runProgram("git", true, { "-C", cacheDir, "fetch", "--quiet", "--force", "--", uri, ref + ":" + localRef });
-
- struct timeval times[2];
- times[0].tv_sec = now;
- times[0].tv_usec = 0;
- times[1].tv_sec = now;
- times[1].tv_usec = 0;
-
- utimes(localRefFile.c_str(), times);
+ if (rev == "" ||
+ chomp(runProgram(
+ RunOptions("git", { "-C", cacheDir, "cat-file", "-t", rev })
+ .killStderr(true)).second) != "commit")
+ {
+ Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", uri));
+
+ // FIXME: git stderr messes up our progress indicator, so
+ // we're using --quiet for now. Should process its stderr.
+ runProgram("git", true, { "-C", cacheDir, "fetch", "--quiet", "--force", "--", uri, *ref + ":" + localRef });
+
+ struct timeval times[2];
+ times[0].tv_sec = now;
+ times[0].tv_usec = 0;
+ times[1].tv_sec = now;
+ times[1].tv_usec = 0;
+
+ utimes(localRefFile.c_str(), times);
+ }
}
// FIXME: check whether rev is an ancestor of ref.
@@ -67,10 +125,9 @@ GitInfo exportGit(ref<Store> store, const std::string & uri,
std::string storeLinkName = hashString(htSHA512, name + std::string("\0"s) + gitInfo.rev).to_string(Base32, false);
Path storeLink = cacheDir + "/" + storeLinkName + ".link";
- PathLocks storeLinkLock({storeLink}, fmt("waiting for lock on '%1%'...", storeLink));
+ PathLocks storeLinkLock({storeLink}, fmt("waiting for lock on '%1%'...", storeLink)); // FIXME: broken
try {
- // FIXME: doesn't handle empty lines
auto json = nlohmann::json::parse(readFile(storeLink));
assert(json["name"] == name && json["rev"] == gitInfo.rev);
@@ -114,7 +171,7 @@ GitInfo exportGit(ref<Store> store, const std::string & uri,
static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
std::string url;
- std::string ref = "master";
+ std::experimental::optional<std::string> ref;
std::string rev;
std::string name = "source";
PathSet context;
@@ -145,7 +202,7 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
} else
url = state.coerceToString(pos, *args[0], context, false, false);
- if (hasPrefix(url, "/")) url = "file://" + url;
+ if (!isUri(url)) url = absPath(url);
// FIXME: git externals probably can be used to bypass the URI
// whitelist. Ah well.
diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc
new file mode 100644
index 000000000..7def7103b
--- /dev/null
+++ b/src/libexpr/primops/fetchMercurial.cc
@@ -0,0 +1,202 @@
+#include "primops.hh"
+#include "eval-inline.hh"
+#include "download.hh"
+#include "store-api.hh"
+#include "pathlocks.hh"
+
+#include <sys/time.h>
+
+#include <regex>
+
+#include <nlohmann/json.hpp>
+
+using namespace std::string_literals;
+
+namespace nix {
+
+struct HgInfo
+{
+ Path storePath;
+ std::string branch;
+ std::string rev;
+ uint64_t revCount = 0;
+};
+
+std::regex commitHashRegex("^[0-9a-fA-F]{40}$");
+
+HgInfo exportMercurial(ref<Store> store, const std::string & uri,
+ std::string rev, const std::string & name)
+{
+ if (rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.hg")) {
+
+ bool clean = runProgram("hg", true, { "status", "-R", uri, "--modified", "--added", "--removed" }) == "";
+
+ if (!clean) {
+
+ /* This is an unclean working tree. So copy all tracked
+ files. */
+
+ printTalkative("copying unclean Mercurial working tree '%s'", uri);
+
+ HgInfo hgInfo;
+ hgInfo.rev = "0000000000000000000000000000000000000000";
+ hgInfo.branch = chomp(runProgram("hg", true, { "branch", "-R", uri }));
+
+ auto files = tokenizeString<std::set<std::string>>(
+ runProgram("hg", true, { "status", "-R", uri, "--clean", "--modified", "--added", "--no-status", "--print0" }), "\0"s);
+
+ PathFilter filter = [&](const Path & p) -> bool {
+ assert(hasPrefix(p, uri));
+ std::string file(p, uri.size() + 1);
+
+ auto st = lstat(p);
+
+ if (S_ISDIR(st.st_mode)) {
+ auto i = files.lower_bound(file);
+ return i != files.end() && hasPrefix(*i, file);
+ }
+
+ return files.count(file);
+ };
+
+ hgInfo.storePath = store->addToStore("source", uri, true, htSHA256, filter);
+
+ return hgInfo;
+ }
+ }
+
+ if (rev == "") rev = "default";
+
+ Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, uri).to_string(Base32, false));
+
+ Path stampFile = fmt("%s/.hg/%s.stamp", cacheDir, hashString(htSHA512, rev).to_string(Base32, false));
+
+ /* If we haven't pulled this repo less than ‘tarball-ttl’ seconds,
+ do so now. */
+ time_t now = time(0);
+ struct stat st;
+ if (stat(stampFile.c_str(), &st) != 0 ||
+ st.st_mtime <= now - settings.tarballTtl)
+ {
+ /* Except that if this is a commit hash that we already have,
+ we don't have to pull again. */
+ if (!(std::regex_match(rev, commitHashRegex)
+ && pathExists(cacheDir)
+ && runProgram(
+ RunOptions("hg", { "log", "-R", cacheDir, "-r", rev, "--template", "1" })
+ .killStderr(true)).second == "1"))
+ {
+ Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", uri));
+
+ if (pathExists(cacheDir)) {
+ runProgram("hg", true, { "pull", "-R", cacheDir, "--", uri });
+ } else {
+ createDirs(dirOf(cacheDir));
+ runProgram("hg", true, { "clone", "--noupdate", "--", uri, cacheDir });
+ }
+ }
+
+ writeFile(stampFile, "");
+ }
+
+ auto tokens = tokenizeString<std::vector<std::string>>(
+ runProgram("hg", true, { "log", "-R", cacheDir, "-r", rev, "--template", "{node} {rev} {branch}" }));
+ assert(tokens.size() == 3);
+
+ HgInfo hgInfo;
+ hgInfo.rev = tokens[0];
+ hgInfo.revCount = std::stoull(tokens[1]);
+ hgInfo.branch = tokens[2];
+
+ std::string storeLinkName = hashString(htSHA512, name + std::string("\0"s) + hgInfo.rev).to_string(Base32, false);
+ Path storeLink = fmt("%s/.hg/%s.link", cacheDir, storeLinkName);
+
+ try {
+ auto json = nlohmann::json::parse(readFile(storeLink));
+
+ assert(json["name"] == name && json["rev"] == hgInfo.rev);
+
+ hgInfo.storePath = json["storePath"];
+
+ if (store->isValidPath(hgInfo.storePath)) {
+ printTalkative("using cached Mercurial store path '%s'", hgInfo.storePath);
+ return hgInfo;
+ }
+
+ } catch (SysError & e) {
+ if (e.errNo != ENOENT) throw;
+ }
+
+ Path tmpDir = createTempDir();
+ AutoDelete delTmpDir(tmpDir, true);
+
+ runProgram("hg", true, { "archive", "-R", cacheDir, "-r", rev, tmpDir });
+
+ deletePath(tmpDir + "/.hg_archival.txt");
+
+ hgInfo.storePath = store->addToStore(name, tmpDir);
+
+ nlohmann::json json;
+ json["storePath"] = hgInfo.storePath;
+ json["uri"] = uri;
+ json["name"] = name;
+ json["branch"] = hgInfo.branch;
+ json["rev"] = hgInfo.rev;
+ json["revCount"] = hgInfo.revCount;
+
+ writeFile(storeLink, json.dump());
+
+ return hgInfo;
+}
+
+static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * args, Value & v)
+{
+ std::string url;
+ std::string rev;
+ std::string name = "source";
+ PathSet context;
+
+ state.forceValue(*args[0]);
+
+ if (args[0]->type == tAttrs) {
+
+ state.forceAttrs(*args[0], pos);
+
+ for (auto & attr : *args[0]->attrs) {
+ string n(attr.name);
+ if (n == "url")
+ url = state.coerceToString(*attr.pos, *attr.value, context, false, false);
+ else if (n == "rev")
+ rev = state.forceStringNoCtx(*attr.value, *attr.pos);
+ else if (n == "name")
+ name = state.forceStringNoCtx(*attr.value, *attr.pos);
+ else
+ throw EvalError("unsupported argument '%s' to 'fetchMercurial', at %s", attr.name, *attr.pos);
+ }
+
+ if (url.empty())
+ throw EvalError(format("'url' argument required, at %1%") % pos);
+
+ } else
+ url = state.coerceToString(pos, *args[0], context, false, false);
+
+ if (!isUri(url)) url = absPath(url);
+
+ // FIXME: git externals probably can be used to bypass the URI
+ // whitelist. Ah well.
+ state.checkURI(url);
+
+ auto hgInfo = exportMercurial(state.store, url, rev, name);
+
+ state.mkAttrs(v, 8);
+ mkString(*state.allocAttr(v, state.sOutPath), hgInfo.storePath, PathSet({hgInfo.storePath}));
+ mkString(*state.allocAttr(v, state.symbols.create("branch")), hgInfo.branch);
+ mkString(*state.allocAttr(v, state.symbols.create("rev")), hgInfo.rev);
+ mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(hgInfo.rev, 0, 12));
+ mkInt(*state.allocAttr(v, state.symbols.create("revCount")), hgInfo.revCount);
+ v.attrs->sort();
+}
+
+static RegisterPrimOp r("fetchMercurial", 1, prim_fetchMercurial);
+
+}
diff --git a/src/libexpr/primops/fetchgit.hh b/src/libexpr/primops/fetchgit.hh
deleted file mode 100644
index 056b6fcbe..000000000
--- a/src/libexpr/primops/fetchgit.hh
+++ /dev/null
@@ -1,23 +0,0 @@
-#pragma once
-
-#include <string>
-
-#include "util.hh"
-
-namespace nix {
-
-class Store;
-
-struct GitInfo
-{
- Path storePath;
- std::string rev;
- std::string shortRev;
- uint64_t revCount = 0;
-};
-
-GitInfo exportGit(ref<Store> store, const std::string & uri,
- const std::string & ref, const std::string & rev = "",
- const std::string & name = "");
-
-}
diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc
index ea27aaa35..d3aac6aba 100644
--- a/src/libmain/common-args.cc
+++ b/src/libmain/common-args.cc
@@ -34,6 +34,10 @@ MixCommonArgs::MixCommonArgs(const string & programName)
warn(e.what());
}
});
+
+ std::string cat = "config";
+ settings.convertToArgs(*this, cat);
+ hiddenCategories.insert(cat);
}
}
diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc
index 0f599f388..85d3c077b 100644
--- a/src/libmain/shared.cc
+++ b/src/libmain/shared.cc
@@ -369,5 +369,6 @@ PrintFreed::~PrintFreed()
% showBytes(results.bytesFreed);
}
+Exit::~Exit() { }
}
diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh
index 9219dbed8..1dcc4f0ac 100644
--- a/src/libmain/shared.hh
+++ b/src/libmain/shared.hh
@@ -17,6 +17,7 @@ public:
int status;
Exit() : status(0) { }
Exit(int status) : status(status) { }
+ virtual ~Exit();
};
int handleExceptions(const string & programName, std::function<void()> fun);
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index 67607ab3d..68af85bf1 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -73,6 +73,23 @@ Path BinaryCacheStore::narInfoFileFor(const Path & storePath)
return storePathToHash(storePath) + ".narinfo";
}
+void BinaryCacheStore::writeNarInfo(ref<NarInfo> narInfo)
+{
+ auto narInfoFile = narInfoFileFor(narInfo->path);
+
+ upsertFile(narInfoFile, narInfo->to_string(), "text/x-nix-narinfo");
+
+ auto hashPart = storePathToHash(narInfo->path);
+
+ {
+ auto state_(state.lock());
+ state_->pathInfoCache.upsert(hashPart, std::shared_ptr<NarInfo>(narInfo));
+ }
+
+ if (diskCache)
+ diskCache->upsertNarInfo(getUri(), hashPart, std::shared_ptr<NarInfo>(narInfo));
+}
+
void BinaryCacheStore::addToStore(const ValidPathInfo & info, const ref<std::string> & nar,
RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr<FSAccessor> accessor)
{
@@ -89,8 +106,6 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const ref<std::str
% info.path % ref);
}
- auto narInfoFile = narInfoFileFor(info.path);
-
assert(nar->compare(0, narMagic.size(), narMagic) == 0);
auto narInfo = make_ref<NarInfo>(info);
@@ -119,42 +134,9 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const ref<std::str
accessor_->addToCache(info.path, *nar);
}
- std::function<void(const Path &, JSONPlaceholder &)> recurse;
-
- recurse = [&](const Path & path, JSONPlaceholder & res) {
- auto st = narAccessor->stat(path);
-
- auto obj = res.object();
-
- switch (st.type) {
- case FSAccessor::Type::tRegular:
- obj.attr("type", "regular");
- obj.attr("size", st.fileSize);
- if (st.isExecutable)
- obj.attr("executable", true);
- break;
- case FSAccessor::Type::tDirectory:
- obj.attr("type", "directory");
- {
- auto res2 = obj.object("entries");
- for (auto & name : narAccessor->readDirectory(path)) {
- auto res3 = res2.placeholder(name);
- recurse(path + "/" + name, res3);
- }
- }
- break;
- case FSAccessor::Type::tSymlink:
- obj.attr("type", "symlink");
- obj.attr("target", narAccessor->readLink(path));
- break;
- default:
- abort();
- }
- };
-
{
auto res = jsonRoot.placeholder("root");
- recurse("", res);
+ listNar(res, narAccessor, "", true);
}
}
@@ -201,17 +183,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const ref<std::str
/* Atomically write the NAR info file.*/
if (secretKey) narInfo->sign(*secretKey);
- upsertFile(narInfoFile, narInfo->to_string(), "text/x-nix-narinfo");
-
- auto hashPart = storePathToHash(narInfo->path);
-
- {
- auto state_(state.lock());
- state_->pathInfoCache.upsert(hashPart, std::shared_ptr<NarInfo>(narInfo));
- }
-
- if (diskCache)
- diskCache->upsertNarInfo(getUri(), hashPart, std::shared_ptr<NarInfo>(narInfo));
+ writeNarInfo(narInfo);
stats.narInfoWrite++;
}
@@ -326,6 +298,22 @@ ref<FSAccessor> BinaryCacheStore::getFSAccessor()
return make_ref<RemoteFSAccessor>(ref<Store>(shared_from_this()), localNarCache);
}
+void BinaryCacheStore::addSignatures(const Path & storePath, const StringSet & sigs)
+{
+ /* Note: this is inherently racy since there is no locking on
+ binary caches. In particular, with S3 this unreliable, even
+ when addSignatures() is called sequentially on a path, because
+ S3 might return an outdated cached version. */
+
+ auto narInfo = make_ref<NarInfo>((NarInfo &) *queryPathInfo(storePath));
+
+ narInfo->sigs.insert(sigs.begin(), sigs.end());
+
+ auto narInfoFile = narInfoFileFor(narInfo->path);
+
+ writeNarInfo(narInfo);
+}
+
std::shared_ptr<std::string> BinaryCacheStore::getBuildLog(const Path & path)
{
Path drvPath;
diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh
index d3b0e0bd9..8492ff600 100644
--- a/src/libstore/binary-cache-store.hh
+++ b/src/libstore/binary-cache-store.hh
@@ -59,6 +59,8 @@ private:
std::string narInfoFileFor(const Path & storePath);
+ void writeNarInfo(ref<NarInfo> narInfo);
+
public:
bool isValidPathUncached(const Path & path) override;
@@ -119,8 +121,7 @@ public:
ref<FSAccessor> getFSAccessor() override;
- void addSignatures(const Path & storePath, const StringSet & sigs) override
- { unsupported(); }
+ void addSignatures(const Path & storePath, const StringSet & sigs) override;
std::shared_ptr<std::string> getBuildLog(const Path & path) override;
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index 9f8edc826..061682377 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -2833,10 +2833,10 @@ void DerivationGoal::runChild()
sandboxProfile += "(deny default (with no-log))\n";
}
- sandboxProfile += "(import \"sandbox-defaults.sb\")";
+ sandboxProfile += "(import \"sandbox-defaults.sb\")\n";
if (fixedOutput)
- sandboxProfile += "(import \"sandbox-network.sb\")";
+ sandboxProfile += "(import \"sandbox-network.sb\")\n";
/* Our rwx outputs */
sandboxProfile += "(allow file-read* file-write* process-exec\n";
@@ -2879,7 +2879,7 @@ void DerivationGoal::runChild()
sandboxProfile += additionalSandboxProfile;
} else
- sandboxProfile += "(import \"sandbox-minimal.sb\")";
+ sandboxProfile += "(import \"sandbox-minimal.sb\")\n";
debug("Generated sandbox profile:");
debug(sandboxProfile);
@@ -2888,6 +2888,8 @@ void DerivationGoal::runChild()
writeFile(sandboxFile, sandboxProfile);
+ bool allowLocalNetworking = get(drv->env, "__darwinAllowLocalNetworking") == "1";
+
/* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms
to find temporary directories, so we want to open up a broader place for them to dump their files, if needed. */
Path globalTmpDir = canonPath(getEnv("TMPDIR", "/tmp"), true);
@@ -2903,6 +2905,10 @@ void DerivationGoal::runChild()
args.push_back("_GLOBAL_TMP_DIR=" + globalTmpDir);
args.push_back("-D");
args.push_back("IMPORT_DIR=" + settings.nixDataDir + "/nix/sandbox/");
+ if (allowLocalNetworking) {
+ args.push_back("-D");
+ args.push_back(string("_ALLOW_LOCAL_NETWORKING=1"));
+ }
args.push_back(drv->builder);
}
#endif
diff --git a/src/libstore/download.cc b/src/libstore/download.cc
index 579a5e8c1..da31029b4 100644
--- a/src/libstore/download.cc
+++ b/src/libstore/download.cc
@@ -533,7 +533,7 @@ struct CurlDownloader : public Downloader
// FIXME: do this on a worker thread
sync2async<DownloadResult>(success, failure, [&]() -> DownloadResult {
#ifdef ENABLE_S3
- S3Helper s3Helper(Aws::Region::US_EAST_1); // FIXME: make configurable
+ S3Helper s3Helper("", Aws::Region::US_EAST_1); // FIXME: make configurable
auto slash = request.uri.find('/', 5);
if (slash == std::string::npos)
throw nix::Error("bad S3 URI '%s'", request.uri);
@@ -707,7 +707,7 @@ bool isUri(const string & s)
size_t pos = s.find("://");
if (pos == string::npos) return false;
string scheme(s, 0, pos);
- return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git" || scheme == "s3";
+ return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git" || scheme == "s3" || scheme == "ssh";
}
diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc
index 2afdeb021..839a7991c 100644
--- a/src/libstore/nar-accessor.cc
+++ b/src/libstore/nar-accessor.cc
@@ -1,5 +1,6 @@
#include "nar-accessor.hh"
#include "archive.hh"
+#include "json.hh"
#include <map>
#include <stack>
@@ -181,4 +182,40 @@ ref<FSAccessor> makeNarAccessor(ref<const std::string> nar)
return make_ref<NarAccessor>(nar);
}
+void listNar(JSONPlaceholder & res, ref<FSAccessor> accessor,
+ const Path & path, bool recurse)
+{
+ auto st = accessor->stat(path);
+
+ auto obj = res.object();
+
+ switch (st.type) {
+ case FSAccessor::Type::tRegular:
+ obj.attr("type", "regular");
+ obj.attr("size", st.fileSize);
+ if (st.isExecutable)
+ obj.attr("executable", true);
+ break;
+ case FSAccessor::Type::tDirectory:
+ obj.attr("type", "directory");
+ {
+ auto res2 = obj.object("entries");
+ for (auto & name : accessor->readDirectory(path)) {
+ if (recurse) {
+ auto res3 = res2.placeholder(name);
+ listNar(res3, accessor, path + "/" + name, true);
+ } else
+ res2.object(name);
+ }
+ }
+ break;
+ case FSAccessor::Type::tSymlink:
+ obj.attr("type", "symlink");
+ obj.attr("target", accessor->readLink(path));
+ break;
+ default:
+ throw Error("path '%s' does not exist in NAR", path);
+ }
+}
+
}
diff --git a/src/libstore/nar-accessor.hh b/src/libstore/nar-accessor.hh
index 83c570be4..ed8fe15ca 100644
--- a/src/libstore/nar-accessor.hh
+++ b/src/libstore/nar-accessor.hh
@@ -8,4 +8,11 @@ namespace nix {
file. */
ref<FSAccessor> makeNarAccessor(ref<const std::string> nar);
+class JSONPlaceholder;
+
+/* Write a JSON representation of the contents of a NAR (except file
+ contents). */
+void listNar(JSONPlaceholder & res, ref<FSAccessor> accessor,
+ const Path & path, bool recurse);
+
}
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
index 5fc7371a5..0079da1be 100644
--- a/src/libstore/s3-binary-cache-store.cc
+++ b/src/libstore/s3-binary-cache-store.cc
@@ -10,6 +10,8 @@
#include "istringstream_nocopy.hh"
#include <aws/core/Aws.h>
+#include <aws/core/auth/AWSCredentialsProvider.h>
+#include <aws/core/auth/AWSCredentialsProviderChain.h>
#include <aws/core/client/ClientConfiguration.h>
#include <aws/core/client/DefaultRetryStrategy.h>
#include <aws/core/utils/logging/FormattedLogSystem.h>
@@ -77,9 +79,15 @@ static void initAWS()
});
}
-S3Helper::S3Helper(const string & region)
+S3Helper::S3Helper(const std::string & profile, const std::string & region)
: config(makeConfig(region))
- , client(make_ref<Aws::S3::S3Client>(*config, true, false))
+ , client(make_ref<Aws::S3::S3Client>(
+ profile == ""
+ ? std::dynamic_pointer_cast<Aws::Auth::AWSCredentialsProvider>(
+ std::make_shared<Aws::Auth::DefaultAWSCredentialsProviderChain>())
+ : std::dynamic_pointer_cast<Aws::Auth::AWSCredentialsProvider>(
+ std::make_shared<Aws::Auth::ProfileConfigFileAWSCredentialsProvider>(profile.c_str())),
+ *config, true, false))
{
}
@@ -148,6 +156,7 @@ S3Helper::DownloadResult S3Helper::getObject(
struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
{
+ const Setting<std::string> profile{this, "", "profile", "The name of the AWS configuration profile to use."};
const Setting<std::string> region{this, Aws::Region::US_EAST_1, "region", {"aws-region"}};
const Setting<std::string> narinfoCompression{this, "", "narinfo-compression", "compression method for .narinfo files"};
const Setting<std::string> lsCompression{this, "", "ls-compression", "compression method for .ls files"};
@@ -163,7 +172,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
const Params & params, const std::string & bucketName)
: S3BinaryCacheStore(params)
, bucketName(bucketName)
- , s3Helper(region)
+ , s3Helper(profile, region)
{
diskCache = getNarInfoDiskCache();
}
@@ -241,8 +250,8 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
auto & error = res.GetError();
if (error.GetErrorType() == Aws::S3::S3Errors::RESOURCE_NOT_FOUND
|| error.GetErrorType() == Aws::S3::S3Errors::NO_SUCH_KEY
- || (error.GetErrorType() == Aws::S3::S3Errors::UNKNOWN // FIXME
- && error.GetMessage().find("404") != std::string::npos))
+ // If bucket listing is disabled, 404s turn into 403s
+ || error.GetErrorType() == Aws::S3::S3Errors::ACCESS_DENIED)
return false;
throw Error(format("AWS error fetching '%s': %s") % path % error.GetMessage());
}
diff --git a/src/libstore/s3.hh b/src/libstore/s3.hh
index 08a7fbf96..4f9964003 100644
--- a/src/libstore/s3.hh
+++ b/src/libstore/s3.hh
@@ -14,7 +14,7 @@ struct S3Helper
ref<Aws::Client::ClientConfiguration> config;
ref<Aws::S3::S3Client> client;
- S3Helper(const std::string & region);
+ S3Helper(const std::string & profile, const std::string & region);
ref<Aws::Client::ClientConfiguration> makeConfig(const std::string & region);
diff --git a/src/libstore/sandbox-defaults.sb b/src/libstore/sandbox-defaults.sb
index d63c8f813..0299d1ee4 100644
--- a/src/libstore/sandbox-defaults.sb
+++ b/src/libstore/sandbox-defaults.sb
@@ -21,8 +21,15 @@
; Allow sending signals within the sandbox.
(allow signal (target same-sandbox))
+; Allow getpwuid.
+(allow mach-lookup (global-name "com.apple.system.opendirectoryd.libinfo"))
+
; Access to /tmp.
-(allow file* process-exec (literal "/tmp") (subpath TMPDIR))
+; The network-outbound/network-inbound ones are for unix domain sockets, which
+; we allow access to in TMPDIR (but if we allow them more broadly, you could in
+; theory escape the sandbox)
+(allow file* process-exec network-outbound network-inbound
+ (literal "/tmp") (subpath TMPDIR))
; Some packages like to read the system version.
(allow file-read* (literal "/System/Library/CoreServices/SystemVersion.plist"))
@@ -30,6 +37,29 @@
; Without this line clang cannot write to /dev/null, breaking some configure tests.
(allow file-read-metadata (literal "/dev"))
+; Many packages like to do local networking in their test suites, but let's only
+; allow it if the package explicitly asks for it.
+(if (param "_ALLOW_LOCAL_NETWORKING")
+ (begin
+ (allow network* (local ip) (local tcp) (local udp))
+
+ ; Allow access to /etc/resolv.conf (which is a symlink to
+ ; /private/var/run/resolv.conf).
+ ; TODO: deduplicate with sandbox-network.sb
+ (allow file-read-metadata
+ (literal "/var")
+ (literal "/etc")
+ (literal "/etc/resolv.conf")
+ (literal "/private/etc/resolv.conf"))
+
+ (allow file-read*
+ (literal "/private/var/run/resolv.conf"))
+
+ ; Allow DNS lookups. This is even needed for localhost, which lots of tests rely on
+ (allow file-read-metadata (literal "/etc/hosts"))
+ (allow file-read* (literal "/private/etc/hosts"))
+ (allow network-outbound (remote unix-socket (path-literal "/private/var/run/mDNSResponder")))))
+
; Standard devices.
(allow file*
(literal "/dev/null")
@@ -54,5 +84,4 @@
(allow file-read-metadata
(literal "/etc")
(literal "/var")
- (literal "/private/var/tmp")
- )
+ (literal "/private/var/tmp"))
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index c57e42fec..8146513a4 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -389,8 +389,10 @@ PathSet Store::queryValidPaths(const PathSet & paths, SubstituteFlag maybeSubsti
Sync<State> state_(State{paths.size(), PathSet()});
std::condition_variable wakeup;
+ ThreadPool pool;
- for (auto & path : paths)
+ auto doQuery = [&](const Path & path ) {
+ checkInterrupt();
queryPathInfo(path,
[path, &state_, &wakeup](ref<ValidPathInfo> info) {
auto state(state_.lock());
@@ -411,6 +413,12 @@ PathSet Store::queryValidPaths(const PathSet & paths, SubstituteFlag maybeSubsti
if (!--state->left)
wakeup.notify_one();
});
+ };
+
+ for (auto & path : paths)
+ pool.enqueue(std::bind(doQuery, path));
+
+ pool.process();
while (true) {
auto state(state_.lock());
@@ -569,9 +577,9 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
auto dstUri = dstStore->getUri();
Activity act(*logger, lvlInfo, actCopyPath,
- srcUri == "local"
+ srcUri == "local" || srcUri == "daemon"
? fmt("copying path '%s' to '%s'", storePath, dstUri)
- : dstUri == "local"
+ : dstUri == "local" || dstUri == "daemon"
? fmt("copying path '%s' from '%s'", storePath, srcUri)
: fmt("copying path '%s' from '%s' to '%s'", storePath, srcUri, dstUri),
{storePath, srcUri, dstUri});
diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc
index ea1deb924..f71229d8f 100644
--- a/src/libutil/archive.cc
+++ b/src/libutil/archive.cc
@@ -29,7 +29,7 @@ const std::string narVersionMagic1 = "nix-archive-1";
static string caseHackSuffix = "~nix~case~hack~";
-PathFilter defaultPathFilter;
+PathFilter defaultPathFilter = [](const Path &) { return true; };
static void dumpContents(const Path & path, size_t size,
diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh
index 607ebf8b2..8a15e849c 100644
--- a/src/libutil/archive.hh
+++ b/src/libutil/archive.hh
@@ -44,13 +44,6 @@ namespace nix {
`+' denotes string concatenation. */
-struct PathFilter
-{
- virtual ~PathFilter() { }
- virtual bool operator () (const Path & path) { return true; }
-};
-
-extern PathFilter defaultPathFilter;
void dumpPath(const Path & path, Sink & sink,
PathFilter & filter = defaultPathFilter);
diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh
index d83049b02..fd7a61df8 100644
--- a/src/libutil/hash.hh
+++ b/src/libutil/hash.hh
@@ -93,8 +93,6 @@ Hash hashFile(HashType ht, const Path & path);
/* Compute the hash of the given path. The hash is defined as
(essentially) hashString(ht, dumpPath(path)). */
-struct PathFilter;
-extern PathFilter defaultPathFilter;
typedef std::pair<Hash, unsigned long long> HashResult;
HashResult hashPath(HashType ht, const Path & path,
PathFilter & filter = defaultPathFilter);
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index 16c2c97aa..96c0cd783 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -895,31 +895,45 @@ std::vector<char *> stringsToCharPtrs(const Strings & ss)
string runProgram(Path program, bool searchPath, const Strings & args,
const std::experimental::optional<std::string> & input)
{
+ RunOptions opts(program, args);
+ opts.searchPath = searchPath;
+ opts.input = input;
+
+ auto res = runProgram(opts);
+
+ if (!statusOk(res.first))
+ throw ExecError(res.first, fmt("program '%1%' %2%", program, statusToString(res.first)));
+
+ return res.second;
+}
+
+std::pair<int, std::string> runProgram(const RunOptions & options)
+{
checkInterrupt();
/* Create a pipe. */
Pipe out, in;
out.create();
- if (input) in.create();
+ if (options.input) in.create();
/* Fork. */
Pid pid = startProcess([&]() {
if (dup2(out.writeSide.get(), STDOUT_FILENO) == -1)
throw SysError("dupping stdout");
- if (input && dup2(in.readSide.get(), STDIN_FILENO) == -1)
+ if (options.input && dup2(in.readSide.get(), STDIN_FILENO) == -1)
throw SysError("dupping stdin");
- Strings args_(args);
- args_.push_front(program);
+ Strings args_(options.args);
+ args_.push_front(options.program);
restoreSignals();
- if (searchPath)
- execvp(program.c_str(), stringsToCharPtrs(args_).data());
+ if (options.searchPath)
+ execvp(options.program.c_str(), stringsToCharPtrs(args_).data());
else
- execv(program.c_str(), stringsToCharPtrs(args_).data());
+ execv(options.program.c_str(), stringsToCharPtrs(args_).data());
- throw SysError(format("executing '%1%'") % program);
+ throw SysError("executing '%1%'", options.program);
});
out.writeSide = -1;
@@ -934,11 +948,11 @@ string runProgram(Path program, bool searchPath, const Strings & args,
});
- if (input) {
+ if (options.input) {
in.readSide = -1;
writerThread = std::thread([&]() {
try {
- writeFull(in.writeSide.get(), *input);
+ writeFull(in.writeSide.get(), *options.input);
promise.set_value();
} catch (...) {
promise.set_exception(std::current_exception());
@@ -951,14 +965,11 @@ string runProgram(Path program, bool searchPath, const Strings & args,
/* Wait for the child to finish. */
int status = pid.wait();
- if (!statusOk(status))
- throw ExecError(status, format("program '%1%' %2%")
- % program % statusToString(status));
/* Wait for the writer thread to finish. */
- if (input) promise.get_future().get();
+ if (options.input) promise.get_future().get();
- return result;
+ return {status, result};
}
diff --git a/src/libutil/util.hh b/src/libutil/util.hh
index fccf5d854..a3494e09b 100644
--- a/src/libutil/util.hh
+++ b/src/libutil/util.hh
@@ -245,6 +245,23 @@ string runProgram(Path program, bool searchPath = false,
const Strings & args = Strings(),
const std::experimental::optional<std::string> & input = {});
+struct RunOptions
+{
+ Path program;
+ bool searchPath = true;
+ Strings args;
+ std::experimental::optional<std::string> input;
+ bool _killStderr = false;
+
+ RunOptions(const Path & program, const Strings & args)
+ : program(program), args(args) { };
+
+ RunOptions & killStderr(bool v) { _killStderr = v; return *this; }
+};
+
+std::pair<int, std::string> runProgram(const RunOptions & options);
+
+
class ExecError : public Error
{
public:
@@ -481,4 +498,10 @@ struct MaintainCount
std::pair<unsigned short, unsigned short> getWindowSize();
+/* Used in various places. */
+typedef std::function<bool(const Path & path)> PathFilter;
+
+extern PathFilter defaultPathFilter;
+
+
}
diff --git a/src/nix/ls.cc b/src/nix/ls.cc
index 5a5fa8f62..69620595d 100644
--- a/src/nix/ls.cc
+++ b/src/nix/ls.cc
@@ -2,10 +2,12 @@
#include "store-api.hh"
#include "fs-accessor.hh"
#include "nar-accessor.hh"
+#include "common-args.hh"
+#include "json.hh"
using namespace nix;
-struct MixLs : virtual Args
+struct MixLs : virtual Args, MixJSON
{
std::string path;
@@ -20,7 +22,7 @@ struct MixLs : virtual Args
mkFlag('d', "directory", "show directories rather than their contents", &showDirectory);
}
- void list(ref<FSAccessor> accessor)
+ void listText(ref<FSAccessor> accessor)
{
std::function<void(const FSAccessor::Stat &, const Path &, const std::string &, bool)> doPath;
@@ -61,10 +63,6 @@ struct MixLs : virtual Args
showFile(curPath, relPath);
};
- if (path == "/") {
- path = "";
- }
-
auto st = accessor->stat(path);
if (st.type == FSAccessor::Type::tMissing)
throw Error(format("path '%1%' does not exist") % path);
@@ -72,6 +70,17 @@ struct MixLs : virtual Args
st.type == FSAccessor::Type::tDirectory ? "." : baseNameOf(path),
showDirectory);
}
+
+ void list(ref<FSAccessor> accessor)
+ {
+ if (path == "/") path = "";
+
+ if (json) {
+ JSONPlaceholder jsonRoot(std::cout);
+ listNar(jsonRoot, accessor, path, recursive);
+ } else
+ listText(accessor);
+ }
};
struct CmdLsStore : StoreCommand, MixLs
diff --git a/src/nix/main.cc b/src/nix/main.cc
index 060402cd0..06bb8a1c3 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -43,10 +43,6 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
.longName("version")
.description("show version information")
.handler([&]() { printVersion(programName); });
-
- std::string cat = "config";
- settings.convertToArgs(*this, cat);
- hiddenCategories.insert(cat);
}
void printFlags(std::ostream & out) override
diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc
index ca02a4c92..47caa401d 100644
--- a/src/nix/path-info.cc
+++ b/src/nix/path-info.cc
@@ -65,7 +65,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
pathLen = std::max(pathLen, storePath.size());
if (json) {
- JSONPlaceholder jsonRoot(std::cout, true);
+ JSONPlaceholder jsonRoot(std::cout);
store->pathInfoToJSON(jsonRoot,
// FIXME: preserve order?
PathSet(storePaths.begin(), storePaths.end()),
diff --git a/src/nix/search.cc b/src/nix/search.cc
index f458367dc..a9dc2d6b9 100644
--- a/src/nix/search.cc
+++ b/src/nix/search.cc
@@ -84,7 +84,7 @@ struct CmdSearch : SourceExprCommand, MixJSON
bool first = true;
- auto jsonOut = json ? std::make_unique<JSONObject>(std::cout, true) : nullptr;
+ auto jsonOut = json ? std::make_unique<JSONObject>(std::cout) : nullptr;
auto sToplevel = state->symbols.create("_toplevel");
auto sRecurse = state->symbols.create("recurseForDerivations");
diff --git a/src/nix/show-config.cc b/src/nix/show-config.cc
index c628c2898..c64b12c8d 100644
--- a/src/nix/show-config.cc
+++ b/src/nix/show-config.cc
@@ -26,7 +26,7 @@ struct CmdShowConfig : Command, MixJSON
{
if (json) {
// FIXME: use appropriate JSON types (bool, ints, etc).
- JSONObject jsonObj(std::cout, true);
+ JSONObject jsonObj(std::cout);
settings.toJSON(jsonObj);
} else {
for (auto & s : settings.getSettings())
diff --git a/tests/common.sh.in b/tests/common.sh.in
index 04d605e34..ca6df2536 100644
--- a/tests/common.sh.in
+++ b/tests/common.sh.in
@@ -90,4 +90,15 @@ fail() {
exit 1
}
+expect() {
+ local expected res
+ expected="$1"
+ shift
+ set +e
+ "$@"
+ res="$?"
+ set -e
+ [[ $res -eq $expected ]]
+}
+
set -x
diff --git a/tests/fetchGit.sh b/tests/fetchGit.sh
new file mode 100644
index 000000000..f13de89f7
--- /dev/null
+++ b/tests/fetchGit.sh
@@ -0,0 +1,86 @@
+source common.sh
+
+if [[ -z $(type -p git) ]]; then
+ echo "Git not installed; skipping Git tests"
+ exit 99
+fi
+
+clearStore
+
+repo=$TEST_ROOT/git
+
+rm -rf $repo ${repo}-tmp $TEST_HOME/.cache/nix/git
+
+git init $repo
+git -C $repo config user.email "foobar@example.com"
+git -C $repo config user.name "Foobar"
+
+echo utrecht > $repo/hello
+git -C $repo add hello
+git -C $repo commit -m 'Bla1'
+rev1=$(git -C $repo rev-parse HEAD)
+
+echo world > $repo/hello
+git -C $repo commit -m 'Bla2' -a
+rev2=$(git -C $repo rev-parse HEAD)
+
+# Fetch the default branch.
+path=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath")
+[[ $(cat $path/hello) = world ]]
+
+# Fetch using an explicit revision hash.
+path2=$(nix eval --raw "(builtins.fetchGit { url = file://$repo; rev = \"$rev2\"; }).outPath")
+[[ $path = $path2 ]]
+
+# Fetch again. This should be cached.
+mv $repo ${repo}-tmp
+path2=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath")
+[[ $path = $path2 ]]
+
+[[ $(nix eval "(builtins.fetchGit file://$repo).revCount") = 2 ]]
+[[ $(nix eval --raw "(builtins.fetchGit file://$repo).rev") = $rev2 ]]
+
+# But with TTL 0, it should fail.
+(! nix eval --tarball-ttl 0 "(builtins.fetchGit file://$repo)" -vvvvv)
+
+# Fetching with an explicit hash should succeed.
+path2=$(nix eval --tarball-ttl 0 --raw "(builtins.fetchGit { url = file://$repo; rev = \"$rev2\"; }).outPath")
+[[ $path = $path2 ]]
+
+path2=$(nix eval --tarball-ttl 0 --raw "(builtins.fetchGit { url = file://$repo; rev = \"$rev1\"; }).outPath")
+[[ $(cat $path2/hello) = utrecht ]]
+
+mv ${repo}-tmp $repo
+
+# Using a clean working tree should produce the same result.
+path2=$(nix eval --raw "(builtins.fetchGit $repo).outPath")
+[[ $path = $path2 ]]
+
+# Using an unclean tree should yield the tracked but uncommitted changes.
+mkdir $repo/dir1 $repo/dir2
+echo foo > $repo/dir1/foo
+echo bar > $repo/bar
+echo bar > $repo/dir2/bar
+git -C $repo add dir1/foo
+git -C $repo rm hello
+
+path2=$(nix eval --raw "(builtins.fetchGit $repo).outPath")
+[ ! -e $path2/hello ]
+[ ! -e $path2/bar ]
+[ ! -e $path2/dir2/bar ]
+[[ $(cat $path2/dir1/foo) = foo ]]
+
+[[ $(nix eval --raw "(builtins.fetchGit $repo).rev") = 0000000000000000000000000000000000000000 ]]
+
+# ... unless we're using an explicit ref or rev.
+path3=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = \"master\"; }).outPath")
+[[ $path = $path3 ]]
+
+path3=$(nix eval --raw "(builtins.fetchGit { url = $repo; rev = \"$rev2\"; }).outPath")
+[[ $path = $path3 ]]
+
+# Committing should not affect the store path.
+git -C $repo commit -m 'Bla3' -a
+
+path4=$(nix eval --tarball-ttl 0 --raw "(builtins.fetchGit file://$repo).outPath")
+[[ $path2 = $path4 ]]
diff --git a/tests/fetchMercurial.sh b/tests/fetchMercurial.sh
new file mode 100644
index 000000000..6cfc0a708
--- /dev/null
+++ b/tests/fetchMercurial.sh
@@ -0,0 +1,84 @@
+source common.sh
+
+if [[ -z $(type -p hg) ]]; then
+ echo "Mercurial not installed; skipping Mercurial tests"
+ exit 99
+fi
+
+clearStore
+
+repo=$TEST_ROOT/hg
+
+rm -rf $repo ${repo}-tmp $TEST_HOME/.cache/nix/hg
+
+hg init $repo
+echo '[ui]' >> $repo/.hg/hgrc
+echo 'username = Foobar <foobar@example.org>' >> $repo/.hg/hgrc
+
+echo utrecht > $repo/hello
+hg add --cwd $repo hello
+hg commit --cwd $repo -m 'Bla1'
+rev1=$(hg log --cwd $repo -r tip --template '{node}')
+
+echo world > $repo/hello
+hg commit --cwd $repo -m 'Bla2'
+rev2=$(hg log --cwd $repo -r tip --template '{node}')
+
+# Fetch the default branch.
+path=$(nix eval --raw "(builtins.fetchMercurial file://$repo).outPath")
+[[ $(cat $path/hello) = world ]]
+
+# Fetch using an explicit revision hash.
+path2=$(nix eval --raw "(builtins.fetchMercurial { url = file://$repo; rev = \"$rev2\"; }).outPath")
+[[ $path = $path2 ]]
+
+# Fetch again. This should be cached.
+mv $repo ${repo}-tmp
+path2=$(nix eval --raw "(builtins.fetchMercurial file://$repo).outPath")
+[[ $path = $path2 ]]
+
+[[ $(nix eval --raw "(builtins.fetchMercurial file://$repo).branch") = default ]]
+[[ $(nix eval "(builtins.fetchMercurial file://$repo).revCount") = 1 ]]
+[[ $(nix eval --raw "(builtins.fetchMercurial file://$repo).rev") = $rev2 ]]
+
+# But with TTL 0, it should fail.
+(! nix eval --tarball-ttl 0 "(builtins.fetchMercurial file://$repo)")
+
+# Fetching with an explicit hash should succeed.
+path2=$(nix eval --tarball-ttl 0 --raw "(builtins.fetchMercurial { url = file://$repo; rev = \"$rev2\"; }).outPath")
+[[ $path = $path2 ]]
+
+path2=$(nix eval --tarball-ttl 0 --raw "(builtins.fetchMercurial { url = file://$repo; rev = \"$rev1\"; }).outPath")
+[[ $(cat $path2/hello) = utrecht ]]
+
+mv ${repo}-tmp $repo
+
+# Using a clean working tree should produce the same result.
+path2=$(nix eval --raw "(builtins.fetchMercurial $repo).outPath")
+[[ $path = $path2 ]]
+
+# Using an unclean tree should yield the tracked but uncommitted changes.
+mkdir $repo/dir1 $repo/dir2
+echo foo > $repo/dir1/foo
+echo bar > $repo/bar
+echo bar > $repo/dir2/bar
+hg add --cwd $repo dir1/foo
+hg rm --cwd $repo hello
+
+path2=$(nix eval --raw "(builtins.fetchMercurial $repo).outPath")
+[ ! -e $path2/hello ]
+[ ! -e $path2/bar ]
+[ ! -e $path2/dir2/bar ]
+[[ $(cat $path2/dir1/foo) = foo ]]
+
+[[ $(nix eval --raw "(builtins.fetchMercurial $repo).rev") = 0000000000000000000000000000000000000000 ]]
+
+# ... unless we're using an explicit rev.
+path3=$(nix eval --raw "(builtins.fetchMercurial { url = $repo; rev = \"default\"; }).outPath")
+[[ $path = $path3 ]]
+
+# Committing should not affect the store path.
+hg commit --cwd $repo -m 'Bla3'
+
+path4=$(nix eval --tarball-ttl 0 --raw "(builtins.fetchMercurial file://$repo).outPath")
+[[ $path2 = $path4 ]]
diff --git a/tests/local.mk b/tests/local.mk
index 6160b04c2..2b92ec4e5 100644
--- a/tests/local.mk
+++ b/tests/local.mk
@@ -14,8 +14,11 @@ nix_tests = \
placeholders.sh nix-shell.sh \
linux-sandbox.sh \
build-remote.sh \
- nar-index.sh \
- structured-attrs.sh
+ nar-access.sh \
+ structured-attrs.sh \
+ fetchGit.sh \
+ fetchMercurial.sh \
+ signing.sh
# parallel.sh
install-tests += $(foreach x, $(nix_tests), tests/$(x))
diff --git a/tests/nar-index.nix b/tests/nar-access.nix
index 0e2a7f721..0e2a7f721 100644
--- a/tests/nar-index.nix
+++ b/tests/nar-access.nix
diff --git a/tests/nar-access.sh b/tests/nar-access.sh
new file mode 100644
index 000000000..e29e4689b
--- /dev/null
+++ b/tests/nar-access.sh
@@ -0,0 +1,38 @@
+source common.sh
+
+echo "building test path"
+storePath="$(nix-build nar-access.nix -A a --no-out-link)"
+
+cd "$TEST_ROOT"
+
+# Dump path to nar.
+narFile="$TEST_ROOT/path.nar"
+nix-store --dump $storePath > $narFile
+
+# Check that find and ls-nar match.
+( cd $storePath; find . | sort ) > files.find
+nix ls-nar -R -d $narFile "" | sort > files.ls-nar
+diff -u files.find files.ls-nar
+
+# Check that file contents of data match.
+nix cat-nar $narFile /foo/data > data.cat-nar
+diff -u data.cat-nar $storePath/foo/data
+
+# Check that file contents of baz match.
+nix cat-nar $narFile /foo/baz > baz.cat-nar
+diff -u baz.cat-nar $storePath/foo/baz
+
+nix cat-store $storePath/foo/baz > baz.cat-nar
+diff -u baz.cat-nar $storePath/foo/baz
+
+# Test --json.
+[[ $(nix ls-nar --json $narFile /) = '{"type":"directory","entries":{"foo":{},"foo-x":{},"qux":{},"zyx":{}}}' ]]
+[[ $(nix ls-nar --json -R $narFile /foo) = '{"type":"directory","entries":{"bar":{"type":"regular","size":0},"baz":{"type":"regular","size":0},"data":{"type":"regular","size":58}}}' ]]
+[[ $(nix ls-nar --json -R $narFile /foo/bar) = '{"type":"regular","size":0}' ]]
+[[ $(nix ls-store --json $storePath) = '{"type":"directory","entries":{"foo":{},"foo-x":{},"qux":{},"zyx":{}}}' ]]
+[[ $(nix ls-store --json -R $storePath/foo) = '{"type":"directory","entries":{"bar":{"type":"regular","size":0},"baz":{"type":"regular","size":0},"data":{"type":"regular","size":58}}}' ]]
+[[ $(nix ls-store --json -R $storePath/foo/bar) = '{"type":"regular","size":0}' ]]
+
+# Test missing files.
+nix ls-store --json -R $storePath/xyzzy 2>&1 | grep 'does not exist in NAR'
+nix ls-store $storePath/xyzzy 2>&1 | grep 'does not exist'
diff --git a/tests/nar-index.sh b/tests/nar-index.sh
deleted file mode 100644
index 51369346c..000000000
--- a/tests/nar-index.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-source common.sh
-
-echo "building test path"
-storePath="$(nix-build nar-index.nix -A a --no-out-link)"
-
-cd "$TEST_ROOT"
-
-echo "dumping path to nar"
-narFile="$TEST_ROOT/path.nar"
-nix-store --dump $storePath > $narFile
-
-echo "check that find and ls-nar match"
-( cd $storePath; find . | sort ) > files.find
-nix ls-nar -R -d $narFile "" | sort > files.ls-nar
-diff -u files.find files.ls-nar
-
-echo "check that file contents of data match"
-nix cat-nar $narFile /foo/data > data.cat-nar
-diff -u data.cat-nar $storePath/foo/data
-
-echo "check that file contents of baz match"
-nix cat-nar $narFile /foo/baz > baz.cat-nar
-diff -u baz.cat-nar $storePath/foo/baz \ No newline at end of file
diff --git a/tests/signing.sh b/tests/signing.sh
new file mode 100644
index 000000000..bef27ac7a
--- /dev/null
+++ b/tests/signing.sh
@@ -0,0 +1,68 @@
+source common.sh
+
+clearStore
+clearCache
+
+nix-store --generate-binary-cache-key cache1.example.org $TEST_ROOT/sk1 $TEST_ROOT/pk1
+pk1=$(cat $TEST_ROOT/pk1)
+nix-store --generate-binary-cache-key cache2.example.org $TEST_ROOT/sk2 $TEST_ROOT/pk2
+pk2=$(cat $TEST_ROOT/pk2)
+
+# Build a path.
+outPath=$(nix-build dependencies.nix --no-out-link --secret-key-files "$TEST_ROOT/sk1 $TEST_ROOT/sk2")
+
+# Verify that the path got signed.
+info=$(nix path-info --json $outPath)
+[[ $info =~ '"ultimate":true' ]]
+[[ $info =~ 'cache1.example.org' ]]
+[[ $info =~ 'cache2.example.org' ]]
+
+# Test "nix verify".
+nix verify -r $outPath
+
+expect 2 nix verify -r $outPath --sigs-needed 1
+
+nix verify -r $outPath --sigs-needed 1 --binary-cache-public-keys $pk1
+
+expect 2 nix verify -r $outPath --sigs-needed 2 --binary-cache-public-keys $pk1
+
+nix verify -r $outPath --sigs-needed 2 --binary-cache-public-keys "$pk1 $pk2"
+
+nix verify --all --sigs-needed 2 --binary-cache-public-keys "$pk1 $pk2"
+
+# Build something unsigned.
+outPath2=$(nix-build simple.nix --no-out-link)
+
+nix verify -r $outPath
+
+# Verify that the path did not get signed but does have the ultimate bit.
+info=$(nix path-info --json $outPath2)
+[[ $info =~ '"ultimate":true' ]]
+(! [[ $info =~ 'signatures' ]])
+
+# Test "nix verify".
+nix verify -r $outPath2
+
+expect 2 nix verify -r $outPath2 --sigs-needed 1
+
+expect 2 nix verify -r $outPath2 --sigs-needed 1 --binary-cache-public-keys $pk1
+
+# Test "nix sign-paths".
+nix sign-paths --key-file $TEST_ROOT/sk1 $outPath2
+
+nix verify -r $outPath2 --sigs-needed 1 --binary-cache-public-keys $pk1
+
+# Copy to a binary cache.
+nix copy --to file://$cacheDir $outPath2
+
+# Verify that signatures got copied.
+info=$(nix path-info --store file://$cacheDir --json $outPath2)
+(! [[ $info =~ '"ultimate":true' ]])
+[[ $info =~ 'cache1.example.org' ]]
+(! [[ $info =~ 'cache2.example.org' ]])
+
+# Verify that adding a signature to a path in a binary cache works.
+nix sign-paths --store file://$cacheDir --key-file $TEST_ROOT/sk2 $outPath2
+info=$(nix path-info --store file://$cacheDir --json $outPath2)
+[[ $info =~ 'cache1.example.org' ]]
+[[ $info =~ 'cache2.example.org' ]]