author     Carlo Nucera <carlo.nucera@protonmail.com>   2020-05-26 11:14:08 -0400
committer  Carlo Nucera <carlo.nucera@protonmail.com>   2020-05-26 11:14:08 -0400
commit     6d73c100417c68a27a23194c78f1252ca511e250 (patch)
tree       04eea8818b97d46fe28ade241069a3650f25a0c6 /src
parent     8aa46cd340c1294c3d06cd52f85c906bdf749070 (diff)
parent     3d3c219d917525b0a131c4332dd65eadfc818f49 (diff)
Merge remote-tracking branch 'origin/master' into enum-FileIngestionMethod
Diffstat (limited to 'src')
-rw-r--r--  src/error-demo/error-demo.cc | 66
-rw-r--r--  src/error-demo/local.mk | 12
-rw-r--r--  src/libexpr/attr-path.cc | 3
-rw-r--r--  src/libexpr/common-eval-args.cc | 45
-rw-r--r--  src/libexpr/eval-inline.hh | 4
-rw-r--r--  src/libexpr/eval.cc | 18
-rw-r--r--  src/libexpr/json-to-value.cc | 63
-rw-r--r--  src/libexpr/local.mk | 4
-rw-r--r--  src/libexpr/nixexpr.hh | 6
-rw-r--r--  src/libexpr/parser.y | 16
-rw-r--r--  src/libexpr/primops.cc | 152
-rw-r--r--  src/libexpr/primops.hh | 1
-rw-r--r--  src/libexpr/primops/fetchGit.cc | 227
-rw-r--r--  src/libexpr/primops/fetchMercurial.cc | 206
-rw-r--r--  src/libexpr/primops/fetchTree.cc | 165
-rw-r--r--  src/libexpr/value.hh | 9
-rw-r--r--  src/libfetchers/attrs.cc | 107
-rw-r--r--  src/libfetchers/attrs.hh | 39
-rw-r--r--  src/libfetchers/cache.cc | 121
-rw-r--r--  src/libfetchers/cache.hh | 34
-rw-r--r--  src/libfetchers/fetchers.cc | 75
-rw-r--r--  src/libfetchers/fetchers.hh | 103
-rw-r--r--  src/libfetchers/git.cc | 438
-rw-r--r--  src/libfetchers/github.cc | 195
-rw-r--r--  src/libfetchers/local.mk | 11
-rw-r--r--  src/libfetchers/mercurial.cc | 303
-rw-r--r--  src/libfetchers/path.cc | 148
-rw-r--r--  src/libfetchers/tarball.cc | 275
-rw-r--r--  src/libfetchers/tree-info.cc | 14
-rw-r--r--  src/libfetchers/tree-info.hh | 29
-rw-r--r--  src/libmain/common-args.cc | 68
-rw-r--r--  src/libmain/shared.cc | 66
-rw-r--r--  src/libstore/build.cc | 176
-rw-r--r--  src/libstore/builtins/fetchurl.cc | 10
-rw-r--r--  src/libstore/derivations.cc | 2
-rw-r--r--  src/libstore/filetransfer.cc (renamed from src/libstore/download.cc) | 283
-rw-r--r--  src/libstore/filetransfer.hh (renamed from src/libstore/download.hh) | 78
-rw-r--r--  src/libstore/gc.cc | 11
-rw-r--r--  src/libstore/globals.cc | 58
-rw-r--r--  src/libstore/globals.hh | 17
-rw-r--r--  src/libstore/http-binary-cache-store.cc | 35
-rw-r--r--  src/libstore/s3-binary-cache-store.cc | 6
-rw-r--r--  src/libstore/s3.hh | 4
-rw-r--r--  src/libstore/store-api.cc | 25
-rw-r--r--  src/libstore/store-api.hh | 1
-rw-r--r--  src/libutil/ansicolor.hh | 15
-rw-r--r--  src/libutil/args.cc | 101
-rw-r--r--  src/libutil/args.hh | 157
-rw-r--r--  src/libutil/config.cc | 35
-rw-r--r--  src/libutil/error.cc | 146
-rw-r--r--  src/libutil/error.hh | 121
-rw-r--r--  src/libutil/logging.cc | 8
-rw-r--r--  src/libutil/logging.hh | 12
-rw-r--r--  src/libutil/serialise.hh | 3
-rw-r--r--  src/libutil/tests/local.mk | 15
-rw-r--r--  src/libutil/tests/tests.cc | 589
-rw-r--r--  src/libutil/tests/url.cc | 266
-rw-r--r--  src/libutil/types.hh | 18
-rw-r--r--  src/libutil/url.cc | 137
-rw-r--r--  src/libutil/url.hh | 62
-rw-r--r--  src/libutil/util.cc | 78
-rw-r--r--  src/libutil/util.hh | 44
-rwxr-xr-x  src/nix-channel/nix-channel.cc | 21
-rw-r--r--  src/nix-env/nix-env.cc | 47
-rw-r--r--  src/nix-env/user-env.cc | 2
-rw-r--r--  src/nix-prefetch-url/nix-prefetch-url.cc | 6
-rw-r--r--  src/nix/add-to-store.cc | 17
-rw-r--r--  src/nix/build.cc | 39
-rw-r--r--  src/nix/cat.cc | 8
-rw-r--r--  src/nix/command.cc | 117
-rw-r--r--  src/nix/command.hh | 68
-rw-r--r--  src/nix/copy.cc | 49
-rw-r--r--  src/nix/dev-shell.cc | 344
-rw-r--r--  src/nix/doctor.cc | 4
-rw-r--r--  src/nix/dump-path.cc | 2
-rw-r--r--  src/nix/edit.cc | 2
-rw-r--r--  src/nix/eval.cc | 7
-rw-r--r--  src/nix/get-env.sh | 9
-rw-r--r--  src/nix/hash.cc | 17
-rw-r--r--  src/nix/installables.cc | 35
-rw-r--r--  src/nix/installables.hh | 45
-rw-r--r--  src/nix/local.mk | 6
-rw-r--r--  src/nix/log.cc | 2
-rw-r--r--  src/nix/ls.cc | 20
-rw-r--r--  src/nix/main.cc | 103
-rw-r--r--  src/nix/make-content-addressable.cc | 3
-rw-r--r--  src/nix/optimise-store.cc | 2
-rw-r--r--  src/nix/path-info.cc | 2
-rw-r--r--  src/nix/ping-store.cc | 2
-rw-r--r--  src/nix/progress-bar.cc | 19
-rw-r--r--  src/nix/repl.cc | 44
-rw-r--r--  src/nix/run.cc | 150
-rw-r--r--  src/nix/search.cc | 24
-rw-r--r--  src/nix/show-config.cc | 4
-rw-r--r--  src/nix/show-derivation.cc | 13
-rw-r--r--  src/nix/sigs.cc | 31
-rw-r--r--  src/nix/upgrade-nix.cc | 34
-rw-r--r--  src/nix/verify.cc | 16
-rw-r--r--  src/nix/why-depends.cc | 15
99 files changed, 5280 insertions, 1585 deletions
diff --git a/src/error-demo/error-demo.cc b/src/error-demo/error-demo.cc
new file mode 100644
index 000000000..a9ff6057c
--- /dev/null
+++ b/src/error-demo/error-demo.cc
@@ -0,0 +1,66 @@
+#include "error.hh"
+#include "nixexpr.hh"
+
+#include <iostream>
+#include <optional>
+
+int main()
+{
+ using namespace nix;
+
+ // In each program where errors occur, this has to be set.
+ ErrorInfo::programName = std::optional("error-demo");
+
+ // Error in a program; no hint and no nix code.
+ printErrorInfo(
+ ErrorInfo { .level = elError,
+ .name = "name",
+ .description = "error description",
+ });
+
+ // Warning with name, description, and hint.
+ // The hintfmt function makes all the substituted text yellow.
+ printErrorInfo(
+ ErrorInfo { .level = elWarning,
+ .name = "name",
+ .description = "error description",
+ .hint = std::optional(
+ hintfmt("there was a %1%", "warning")),
+ });
+
+
+ // Warning with nix file, line number, column, and the lines of
+ // code where a warning occurred.
+ SymbolTable testTable;
+ auto problem_file = testTable.create("myfile.nix");
+
+ printErrorInfo(
+ ErrorInfo{
+ .level = elWarning,
+ .name = "warning name",
+ .description = "warning description",
+ .hint = hintfmt("this hint has %1% templated %2%!!", "yellow", "values"),
+ .nixCode = NixCode {
+ .errPos = Pos(problem_file, 40, 13),
+ .prevLineOfCode = std::nullopt,
+ .errLineOfCode = "this is the problem line of code",
+ .nextLineOfCode = std::nullopt
+ }});
+
+ // Error with previous and next lines of code.
+ printErrorInfo(
+ ErrorInfo{
+ .level = elError,
+ .name = "error name",
+ .description = "error description",
+ .hint = hintfmt("this hint has %1% templated %2%!!", "yellow", "values"),
+ .nixCode = NixCode {
+ .errPos = Pos(problem_file, 40, 13),
+ .prevLineOfCode = std::optional("previous line of code"),
+ .errLineOfCode = "this is the problem line of code",
+ .nextLineOfCode = std::optional("next line of code"),
+ }});
+
+
+ return 0;
+}
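
[Note, not part of the diff: the demo above shows the new ErrorInfo/printErrorInfo API with designated initializers and hintfmt. As a rough sketch of how calling code might adapt a caught exception into this structure, the helper below is hypothetical; only ErrorInfo, elError and printErrorInfo are taken from the demo.]

    // Sketch only: wrap a caught exception into the ErrorInfo structure used above.
    // The helper name reportException is hypothetical.
    #include "error.hh"
    #include <exception>

    namespace nix {

    void reportException(const std::exception & e)
    {
        printErrorInfo(
            ErrorInfo { .level = elError,
                        .name = "caught exception",
                        .description = e.what(),
            });
    }

    }
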
diff --git a/src/error-demo/local.mk b/src/error-demo/local.mk
new file mode 100644
index 000000000..2c528490a
--- /dev/null
+++ b/src/error-demo/local.mk
@@ -0,0 +1,12 @@
+programs += error-demo
+
+error-demo_DIR := $(d)
+
+error-demo_SOURCES := \
+ $(wildcard $(d)/*.cc) \
+
+error-demo_CXXFLAGS += -I src/libutil -I src/libexpr
+
+error-demo_LIBS = libutil libexpr
+
+error-demo_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) -lboost_context -lboost_thread -lboost_system
diff --git a/src/libexpr/attr-path.cc b/src/libexpr/attr-path.cc
index 4545bfd72..76d101b98 100644
--- a/src/libexpr/attr-path.cc
+++ b/src/libexpr/attr-path.cc
@@ -37,9 +37,6 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
{
Strings tokens = parseAttrPath(attrPath);
- Error attrError =
- Error(format("attribute selection path '%1%' does not match expression") % attrPath);
-
Value * v = &vIn;
Pos pos = noPos;
diff --git a/src/libexpr/common-eval-args.cc b/src/libexpr/common-eval-args.cc
index 13950ab8d..44baadd53 100644
--- a/src/libexpr/common-eval-args.cc
+++ b/src/libexpr/common-eval-args.cc
@@ -1,31 +1,36 @@
#include "common-eval-args.hh"
#include "shared.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "util.hh"
#include "eval.hh"
+#include "fetchers.hh"
+#include "store-api.hh"
namespace nix {
MixEvalArgs::MixEvalArgs()
{
- mkFlag()
- .longName("arg")
- .description("argument to be passed to Nix functions")
- .labels({"name", "expr"})
- .handler([&](std::vector<std::string> ss) { autoArgs[ss[0]] = 'E' + ss[1]; });
+ addFlag({
+ .longName = "arg",
+ .description = "argument to be passed to Nix functions",
+ .labels = {"name", "expr"},
+ .handler = {[&](std::string name, std::string expr) { autoArgs[name] = 'E' + expr; }}
+ });
- mkFlag()
- .longName("argstr")
- .description("string-valued argument to be passed to Nix functions")
- .labels({"name", "string"})
- .handler([&](std::vector<std::string> ss) { autoArgs[ss[0]] = 'S' + ss[1]; });
+ addFlag({
+ .longName = "argstr",
+ .description = "string-valued argument to be passed to Nix functions",
+ .labels = {"name", "string"},
+ .handler = {[&](std::string name, std::string s) { autoArgs[name] = 'S' + s; }},
+ });
- mkFlag()
- .shortName('I')
- .longName("include")
- .description("add a path to the list of locations used to look up <...> file names")
- .label("path")
- .handler([&](std::string s) { searchPath.push_back(s); });
+ addFlag({
+ .longName = "include",
+ .shortName = 'I',
+ .description = "add a path to the list of locations used to look up <...> file names",
+ .labels = {"path"},
+ .handler = {[&](std::string s) { searchPath.push_back(s); }}
+ });
}
Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
@@ -46,9 +51,9 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
Path lookupFileArg(EvalState & state, string s)
{
if (isUri(s)) {
- CachedDownloadRequest request(s);
- request.unpack = true;
- return getDownloader()->downloadCached(state.store, request).path;
+ return state.store->toRealPath(
+ fetchers::downloadTarball(
+ state.store, resolveUri(s), "source", false).storePath);
} else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
Path p = s.substr(1, s.size() - 2);
return state.findFile(p);
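
[Note, not part of the diff: the hunk above replaces the fluent mkFlag() builder chain with addFlag() taking a designated-initializer flag description. A minimal sketch of declaring one more flag in the same style; the flag itself (--eval-arg-file) is hypothetical, and readFile is assumed to be the existing libutil helper.]

    // Sketch: another flag declared with the new addFlag() style shown above.
    addFlag({
        .longName = "eval-arg-file",
        .description = "read a string-valued argument for a Nix function from a file",
        .labels = {"name", "path"},
        .handler = {[&](std::string name, std::string path) {
            // Same 'S' prefix convention as --argstr above.
            autoArgs[name] = 'S' + readFile(path);
        }}
    });
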
diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh
index c27116e3b..942cda1ea 100644
--- a/src/libexpr/eval-inline.hh
+++ b/src/libexpr/eval-inline.hh
@@ -57,7 +57,7 @@ inline void EvalState::forceAttrs(Value & v)
inline void EvalState::forceAttrs(Value & v, const Pos & pos)
{
- forceValue(v);
+ forceValue(v, pos);
if (v.type != tAttrs)
throwTypeError("value is %1% while a set was expected, at %2%", v, pos);
}
@@ -73,7 +73,7 @@ inline void EvalState::forceList(Value & v)
inline void EvalState::forceList(Value & v, const Pos & pos)
{
- forceValue(v);
+ forceValue(v, pos);
if (!v.isList())
throwTypeError("value is %1% while a list was expected, at %2%", v, pos);
}
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 88bbf3b32..82eb1582e 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -5,7 +5,7 @@
#include "derivations.hh"
#include "globals.hh"
#include "eval-inline.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "json.hh"
#include "function-trace.hh"
@@ -22,6 +22,8 @@
#if HAVE_BOEHMGC
+#define GC_INCLUDE_NEW
+
#include <gc/gc.h>
#include <gc/gc_cpp.h>
@@ -56,6 +58,12 @@ static char * dupStringWithLen(const char * s, size_t size)
}
+RootValue allocRootValue(Value * v)
+{
+ return std::allocate_shared<Value *>(traceable_allocator<Value *>(), v);
+}
+
+
static void printValue(std::ostream & str, std::set<const Value *> & active, const Value & v)
{
checkInterrupt();
@@ -1256,7 +1264,7 @@ void ExprWith::eval(EvalState & state, Env & env, Value & v)
void ExprIf::eval(EvalState & state, Env & env, Value & v)
{
- (state.evalBool(env, cond) ? then : else_)->eval(state, env, v);
+ (state.evalBool(env, cond, pos) ? then : else_)->eval(state, env, v);
}
@@ -1502,7 +1510,7 @@ NixFloat EvalState::forceFloat(Value & v, const Pos & pos)
bool EvalState::forceBool(Value & v, const Pos & pos)
{
- forceValue(v);
+ forceValue(v, pos);
if (v.type != tBool)
throwTypeError("value is %1% while a Boolean was expected, at %2%", v, pos);
return v.boolean;
@@ -1517,7 +1525,7 @@ bool EvalState::isFunctor(Value & fun)
void EvalState::forceFunction(Value & v, const Pos & pos)
{
- forceValue(v);
+ forceValue(v, pos);
if (v.type != tLambda && v.type != tPrimOp && v.type != tPrimOpApp && !isFunctor(v))
throwTypeError("value is %1% while a function was expected, at %2%", v, pos);
}
@@ -1594,7 +1602,7 @@ std::optional<string> EvalState::tryAttrsToString(const Pos & pos, Value & v,
string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context,
bool coerceMore, bool copyToStore)
{
- forceValue(v);
+ forceValue(v, pos);
string s;
diff --git a/src/libexpr/json-to-value.cc b/src/libexpr/json-to-value.cc
index 1fdce1983..76e1a26bf 100644
--- a/src/libexpr/json-to-value.cc
+++ b/src/libexpr/json-to-value.cc
@@ -4,7 +4,6 @@
#include <nlohmann/json.hpp>
using json = nlohmann::json;
-using std::unique_ptr;
namespace nix {
@@ -13,69 +12,69 @@ namespace nix {
class JSONSax : nlohmann::json_sax<json> {
class JSONState {
protected:
- unique_ptr<JSONState> parent;
- Value * v;
+ std::unique_ptr<JSONState> parent;
+ RootValue v;
public:
- virtual unique_ptr<JSONState> resolve(EvalState &)
+ virtual std::unique_ptr<JSONState> resolve(EvalState &)
{
throw std::logic_error("tried to close toplevel json parser state");
- };
- explicit JSONState(unique_ptr<JSONState>&& p) : parent(std::move(p)), v(nullptr) {};
- explicit JSONState(Value* v) : v(v) {};
- JSONState(JSONState& p) = delete;
- Value& value(EvalState & state)
+ }
+ explicit JSONState(std::unique_ptr<JSONState> && p) : parent(std::move(p)) {}
+ explicit JSONState(Value * v) : v(allocRootValue(v)) {}
+ JSONState(JSONState & p) = delete;
+ Value & value(EvalState & state)
{
- if (v == nullptr)
- v = state.allocValue();
- return *v;
- };
- virtual ~JSONState() {};
- virtual void add() {};
+ if (!v)
+ v = allocRootValue(state.allocValue());
+ return **v;
+ }
+ virtual ~JSONState() {}
+ virtual void add() {}
};
class JSONObjectState : public JSONState {
using JSONState::JSONState;
- ValueMap attrs = ValueMap();
- virtual unique_ptr<JSONState> resolve(EvalState & state) override
+ ValueMap attrs;
+ std::unique_ptr<JSONState> resolve(EvalState & state) override
{
- Value& v = parent->value(state);
+ Value & v = parent->value(state);
state.mkAttrs(v, attrs.size());
for (auto & i : attrs)
v.attrs->push_back(Attr(i.first, i.second));
return std::move(parent);
}
- virtual void add() override { v = nullptr; };
+ void add() override { v = nullptr; }
public:
- void key(string_t& name, EvalState & state)
+ void key(string_t & name, EvalState & state)
{
- attrs[state.symbols.create(name)] = &value(state);
+ attrs.insert_or_assign(state.symbols.create(name), &value(state));
}
};
class JSONListState : public JSONState {
- ValueVector values = ValueVector();
- virtual unique_ptr<JSONState> resolve(EvalState & state) override
+ ValueVector values;
+ std::unique_ptr<JSONState> resolve(EvalState & state) override
{
- Value& v = parent->value(state);
+ Value & v = parent->value(state);
state.mkList(v, values.size());
for (size_t n = 0; n < values.size(); ++n) {
v.listElems()[n] = values[n];
}
return std::move(parent);
}
- virtual void add() override {
- values.push_back(v);
+ void add() override {
+ values.push_back(*v);
v = nullptr;
- };
+ }
public:
- JSONListState(unique_ptr<JSONState>&& p, std::size_t reserve) : JSONState(std::move(p))
+ JSONListState(std::unique_ptr<JSONState> && p, std::size_t reserve) : JSONState(std::move(p))
{
values.reserve(reserve);
}
};
EvalState & state;
- unique_ptr<JSONState> rs;
+ std::unique_ptr<JSONState> rs;
template<typename T, typename... Args> inline bool handle_value(T f, Args... args)
{
@@ -107,12 +106,12 @@ public:
return handle_value(mkInt, val);
}
- bool number_float(number_float_t val, const string_t& s)
+ bool number_float(number_float_t val, const string_t & s)
{
return handle_value(mkFloat, val);
}
- bool string(string_t& val)
+ bool string(string_t & val)
{
return handle_value<void(Value&, const char*)>(mkString, val.c_str());
}
@@ -123,7 +122,7 @@ public:
return true;
}
- bool key(string_t& name)
+ bool key(string_t & name)
{
dynamic_cast<JSONObjectState*>(rs.get())->key(name, state);
return true;
diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk
index a4ccab376..917e8a1c7 100644
--- a/src/libexpr/local.mk
+++ b/src/libexpr/local.mk
@@ -6,9 +6,9 @@ libexpr_DIR := $(d)
libexpr_SOURCES := $(wildcard $(d)/*.cc) $(wildcard $(d)/primops/*.cc) $(d)/lexer-tab.cc $(d)/parser-tab.cc
-libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libmain -I src/libexpr
+libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libmain -I src/libexpr
-libexpr_LIBS = libutil libstore libnixrust
+libexpr_LIBS = libutil libstore libfetchers libnixrust
libexpr_LDFLAGS =
ifneq ($(OS), FreeBSD)
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index f7e9105a4..25798cac6 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -209,9 +209,10 @@ struct ExprList : Expr
struct Formal
{
+ Pos pos;
Symbol name;
Expr * def;
- Formal(const Symbol & name, Expr * def) : name(name), def(def) { };
+ Formal(const Pos & pos, const Symbol & name, Expr * def) : pos(pos), name(name), def(def) { };
};
struct Formals
@@ -261,8 +262,9 @@ struct ExprWith : Expr
struct ExprIf : Expr
{
+ Pos pos;
Expr * cond, * then, * else_;
- ExprIf(Expr * cond, Expr * then, Expr * else_) : cond(cond), then(then), else_(else_) { };
+ ExprIf(const Pos & pos, Expr * cond, Expr * then, Expr * else_) : pos(pos), cond(cond), then(then), else_(else_) { };
COMMON_METHODS
};
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index 9c769e803..1993fa6c1 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -335,7 +335,7 @@ expr_function
;
expr_if
- : IF expr THEN expr ELSE expr { $$ = new ExprIf($2, $4, $6); }
+ : IF expr THEN expr ELSE expr { $$ = new ExprIf(CUR_POS, $2, $4, $6); }
| expr_op
;
@@ -531,8 +531,8 @@ formals
;
formal
- : ID { $$ = new Formal(data->symbols.create($1), 0); }
- | ID '?' expr { $$ = new Formal(data->symbols.create($1), $3); }
+ : ID { $$ = new Formal(CUR_POS, data->symbols.create($1), 0); }
+ | ID '?' expr { $$ = new Formal(CUR_POS, data->symbols.create($1), $3); }
;
%%
@@ -544,7 +544,8 @@ formal
#include <unistd.h>
#include "eval.hh"
-#include "download.hh"
+#include "filetransfer.hh"
+#include "fetchers.hh"
#include "store-api.hh"
@@ -687,10 +688,9 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
if (isUri(elem.second)) {
try {
- CachedDownloadRequest request(elem.second);
- request.unpack = true;
- res = { true, getDownloader()->downloadCached(store, request).path };
- } catch (DownloadError & e) {
+ res = { true, store->toRealPath(fetchers::downloadTarball(
+ store, resolveUri(elem.second), "source", false).storePath) };
+ } catch (FileTransferError & e) {
printError(format("warning: Nix search path entry '%1%' cannot be downloaded, ignoring") % elem.second);
res = { false, "" };
}
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 415e77fe7..fcbb75ceb 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -1,6 +1,5 @@
#include "archive.hh"
#include "derivations.hh"
-#include "download.hh"
#include "eval-inline.hh"
#include "eval.hh"
#include "globals.hh"
@@ -122,16 +121,16 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
}
w.attrs->sort();
- static Value * fun = nullptr;
+ static RootValue fun;
if (!fun) {
- fun = state.allocValue();
+ fun = allocRootValue(state.allocValue());
state.eval(state.parseExprFromString(
#include "imported-drv-to-derivation.nix.gen.hh"
- , "/"), *fun);
+ , "/"), **fun);
}
- state.forceFunction(*fun, pos);
- mkApp(v, *fun, w);
+ state.forceFunction(**fun, pos);
+ mkApp(v, **fun, w);
state.forceAttrs(v, pos);
} else {
state.forceAttrs(*args[0]);
@@ -242,7 +241,7 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
/* Return a string representing the type of the expression. */
static void prim_typeOf(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
string t;
switch (args[0]->type) {
case tInt: t = "int"; break;
@@ -270,7 +269,7 @@ static void prim_typeOf(EvalState & state, const Pos & pos, Value * * args, Valu
/* Determine whether the argument is the null value. */
static void prim_isNull(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
mkBool(v, args[0]->type == tNull);
}
@@ -278,7 +277,7 @@ static void prim_isNull(EvalState & state, const Pos & pos, Value * * args, Valu
/* Determine whether the argument is a function. */
static void prim_isFunction(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
bool res;
switch (args[0]->type) {
case tLambda:
@@ -297,21 +296,21 @@ static void prim_isFunction(EvalState & state, const Pos & pos, Value * * args,
/* Determine whether the argument is an integer. */
static void prim_isInt(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
mkBool(v, args[0]->type == tInt);
}
/* Determine whether the argument is a float. */
static void prim_isFloat(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
mkBool(v, args[0]->type == tFloat);
}
/* Determine whether the argument is a string. */
static void prim_isString(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
mkBool(v, args[0]->type == tString);
}
@@ -319,14 +318,14 @@ static void prim_isString(EvalState & state, const Pos & pos, Value * * args, Va
/* Determine whether the argument is a Boolean. */
static void prim_isBool(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
mkBool(v, args[0]->type == tBool);
}
/* Determine whether the argument is a path. */
static void prim_isPath(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
mkBool(v, args[0]->type == tPath);
}
@@ -383,7 +382,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
args[0]->attrs->find(state.symbols.create("operator"));
if (op == args[0]->attrs->end())
throw EvalError(format("attribute 'operator' required, at %1%") % pos);
- state.forceValue(*op->value);
+ state.forceValue(*op->value, pos);
/* Construct the closure by applying the operator to element of
`workSet', adding the result to `workSet', continuing until
@@ -402,7 +401,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
e->attrs->find(state.symbols.create("key"));
if (key == e->attrs->end())
throw EvalError(format("attribute 'key' required, at %1%") % pos);
- state.forceValue(*key->value);
+ state.forceValue(*key->value, pos);
if (!doneKeys.insert(key->value).second) continue;
res.push_back(e);
@@ -414,7 +413,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
/* Add the values returned by the operator to the work set. */
for (unsigned int n = 0; n < call.listSize(); ++n) {
- state.forceValue(*call.listElems()[n]);
+ state.forceValue(*call.listElems()[n], pos);
workSet.push_back(call.listElems()[n]);
}
}
@@ -446,7 +445,7 @@ static void prim_throw(EvalState & state, const Pos & pos, Value * * args, Value
static void prim_addErrorContext(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
try {
- state.forceValue(*args[1]);
+ state.forceValue(*args[1], pos);
v = *args[1];
} catch (Error & e) {
PathSet context;
@@ -462,7 +461,7 @@ static void prim_tryEval(EvalState & state, const Pos & pos, Value * * args, Val
{
state.mkAttrs(v, 2);
try {
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
v.attrs->push_back(Attr(state.sValue, args[0]));
mkBool(*state.allocAttr(v, state.symbols.create("success")), true);
} catch (AssertionError & e) {
@@ -484,8 +483,8 @@ static void prim_getEnv(EvalState & state, const Pos & pos, Value * * args, Valu
/* Evaluate the first argument, then return the second argument. */
static void prim_seq(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
- state.forceValue(*args[1]);
+ state.forceValue(*args[0], pos);
+ state.forceValue(*args[1], pos);
v = *args[1];
}
@@ -495,7 +494,7 @@ static void prim_seq(EvalState & state, const Pos & pos, Value * * args, Value &
static void prim_deepSeq(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceValueDeep(*args[0]);
- state.forceValue(*args[1]);
+ state.forceValue(*args[1], pos);
v = *args[1];
}
@@ -504,12 +503,12 @@ static void prim_deepSeq(EvalState & state, const Pos & pos, Value * * args, Val
return the second expression. Useful for debugging. */
static void prim_trace(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
if (args[0]->type == tString)
printError(format("trace: %1%") % args[0]->string.s);
else
printError(format("trace: %1%") % *args[0]);
- state.forceValue(*args[1]);
+ state.forceValue(*args[1], pos);
v = *args[1];
}
@@ -600,7 +599,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
try {
if (ignoreNulls) {
- state.forceValue(*i->value);
+ state.forceValue(*i->value, pos);
if (i->value->type == tNull) continue;
}
@@ -1024,7 +1023,9 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
for (auto path : context) {
if (path.at(0) != '/')
- throw EvalError(format("in 'toFile': the file '%1%' cannot refer to derivation outputs, at %2%") % name % pos);
+ throw EvalError(format(
+ "in 'toFile': the file named '%1%' must not contain a reference "
+ "to a derivation but contains (%2%), at %3%") % name % path % pos);
refs.insert(state.store->parseStorePath(path));
}
@@ -1094,7 +1095,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
if (!context.empty())
throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % pos);
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
if (args[0]->type != tLambda)
throw TypeError(format("first argument in call to 'filterSource' is not a function but %1%, at %2%") % showType(*args[0]) % pos);
@@ -1120,7 +1121,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
} else if (attr.name == state.sName)
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else if (n == "filter") {
- state.forceValue(*attr.value);
+ state.forceValue(*attr.value, pos);
filterFun = attr.value;
} else if (n == "recursive")
recursive = FileIngestionMethod { state.forceBool(*attr.value, *attr.pos) };
@@ -1191,7 +1192,7 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
throw EvalError(format("attribute '%1%' missing, at %2%") % attr % pos);
// !!! add to stack trace?
if (state.countCalls && i->pos) state.attrSelects[*i->pos]++;
- state.forceValue(*i->value);
+ state.forceValue(*i->value, pos);
v = *i->value;
}
@@ -1221,7 +1222,7 @@ static void prim_hasAttr(EvalState & state, const Pos & pos, Value * * args, Val
/* Determine whether the argument is a set. */
static void prim_isAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
mkBool(v, args[0]->type == tAttrs);
}
@@ -1347,7 +1348,7 @@ static void prim_catAttrs(EvalState & state, const Pos & pos, Value * * args, Va
*/
static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
if (args[0]->type != tLambda)
throw TypeError(format("'functionArgs' requires a function, at %1%") % pos);
@@ -1357,9 +1358,12 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
}
state.mkAttrs(v, args[0]->lambda.fun->formals->formals.size());
- for (auto & i : args[0]->lambda.fun->formals->formals)
+ for (auto & i : args[0]->lambda.fun->formals->formals) {
// !!! should optimise booleans (allocate only once)
- mkBool(*state.allocAttr(v, i.name), i.def);
+ Value * value = state.allocValue();
+ v.attrs->push_back(Attr(i.name, value, &i.pos));
+ mkBool(*value, i.def);
+ }
v.attrs->sort();
}
@@ -1390,7 +1394,7 @@ static void prim_mapAttrs(EvalState & state, const Pos & pos, Value * * args, Va
/* Determine whether the argument is a list. */
static void prim_isList(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
mkBool(v, args[0]->isList());
}
@@ -1400,7 +1404,7 @@ static void elemAt(EvalState & state, const Pos & pos, Value & list, int n, Valu
state.forceList(list, pos);
if (n < 0 || (unsigned int) n >= list.listSize())
throw Error(format("list index %1% is out of bounds, at %2%") % n % pos);
- state.forceValue(*list.listElems()[n]);
+ state.forceValue(*list.listElems()[n], pos);
v = *list.listElems()[n];
}
@@ -1523,9 +1527,9 @@ static void prim_foldlStrict(EvalState & state, const Pos & pos, Value * * args,
vCur = n == args[2]->listSize() - 1 ? &v : state.allocValue();
state.callFunction(vTmp, *args[2]->listElems()[n], *vCur, pos);
}
- state.forceValue(v);
+ state.forceValue(v, pos);
} else {
- state.forceValue(*args[1]);
+ state.forceValue(*args[1], pos);
v = *args[1];
}
}
@@ -1590,7 +1594,7 @@ static void prim_sort(EvalState & state, const Pos & pos, Value * * args, Value
auto len = args[1]->listSize();
state.mkList(v, len);
for (unsigned int n = 0; n < len; ++n) {
- state.forceValue(*args[1]->listElems()[n]);
+ state.forceValue(*args[1]->listElems()[n], pos);
v.listElems()[n] = args[1]->listElems()[n];
}
@@ -1625,7 +1629,7 @@ static void prim_partition(EvalState & state, const Pos & pos, Value * * args, V
for (unsigned int n = 0; n < len; ++n) {
auto vElem = args[1]->listElems()[n];
- state.forceValue(*vElem);
+ state.forceValue(*vElem, pos);
Value res;
state.callFunction(*args[0], *vElem, res, pos);
if (state.forceBool(res, pos))
@@ -1756,8 +1760,8 @@ static void prim_bitXor(EvalState & state, const Pos & pos, Value * * args, Valu
static void prim_lessThan(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
- state.forceValue(*args[1]);
+ state.forceValue(*args[0], pos);
+ state.forceValue(*args[1], pos);
CompareValues comp;
mkBool(v, comp(args[0], args[1]));
}
@@ -2049,68 +2053,6 @@ static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args
/*************************************************************
- * Networking
- *************************************************************/
-
-
-void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
- const string & who, bool unpack, const std::string & defaultName)
-{
- CachedDownloadRequest request("");
- request.unpack = unpack;
- request.name = defaultName;
-
- state.forceValue(*args[0]);
-
- if (args[0]->type == tAttrs) {
-
- state.forceAttrs(*args[0], pos);
-
- for (auto & attr : *args[0]->attrs) {
- string n(attr.name);
- if (n == "url")
- request.uri = state.forceStringNoCtx(*attr.value, *attr.pos);
- else if (n == "sha256")
- request.expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
- else if (n == "name")
- request.name = state.forceStringNoCtx(*attr.value, *attr.pos);
- else
- throw EvalError(format("unsupported argument '%1%' to '%2%', at %3%") % attr.name % who % attr.pos);
- }
-
- if (request.uri.empty())
- throw EvalError(format("'url' argument required, at %1%") % pos);
-
- } else
- request.uri = state.forceStringNoCtx(*args[0], pos);
-
- state.checkURI(request.uri);
-
- if (evalSettings.pureEval && !request.expectedHash)
- throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who);
-
- auto res = getDownloader()->downloadCached(state.store, request);
-
- if (state.allowedPaths)
- state.allowedPaths->insert(res.path);
-
- mkString(v, res.storePath, PathSet({res.storePath}));
-}
-
-
-static void prim_fetchurl(EvalState & state, const Pos & pos, Value * * args, Value & v)
-{
- fetch(state, pos, args, v, "fetchurl", false, "");
-}
-
-
-static void prim_fetchTarball(EvalState & state, const Pos & pos, Value * * args, Value & v)
-{
- fetch(state, pos, args, v, "fetchTarball", true, "source");
-}
-
-
-/*************************************************************
* Primop registration
*************************************************************/
@@ -2292,10 +2234,6 @@ void EvalState::createBaseEnv()
addPrimOp("derivationStrict", 1, prim_derivationStrict);
addPrimOp("placeholder", 1, prim_placeholder);
- // Networking
- addPrimOp("__fetchurl", 1, prim_fetchurl);
- addPrimOp("fetchTarball", 1, prim_fetchTarball);
-
/* Add a wrapper around the derivation primop that computes the
`drvPath' and `outPath' attributes lazily. */
string path = canonPath(settings.nixDataDir + "/nix/corepkgs/derivation.nix", true);
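
[Note, not part of the diff: the networking primops removed here reappear in the new src/libexpr/primops/fetchTree.cc further down, registered through RegisterPrimOp instead of addPrimOp calls inside createBaseEnv(). A rough sketch of that registration pattern, using a hypothetical primop; the constructor arguments (name, arity, function) follow the usages visible in this commit.]

    // Sketch of the RegisterPrimOp pattern; prim_example and "__example" are hypothetical.
    static void prim_example(EvalState & state, const Pos & pos, Value * * args, Value & v)
    {
        state.forceValue(*args[0], pos);   // force with the call position, as in the hunks above
        mkBool(v, args[0]->type == tString);
    }

    static RegisterPrimOp rExample("__example", 1, prim_example);
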
diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh
index c790b30f6..05d0792ef 100644
--- a/src/libexpr/primops.hh
+++ b/src/libexpr/primops.hh
@@ -20,6 +20,7 @@ struct RegisterPrimOp
them. */
/* Load a ValueInitializer from a DSO and return whatever it initializes */
void prim_importNative(EvalState & state, const Pos & pos, Value * * args, Value & v);
+
/* Execute a program and parse its output */
void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v);
diff --git a/src/libexpr/primops/fetchGit.cc b/src/libexpr/primops/fetchGit.cc
index 9f2b673ad..1a8798fcc 100644
--- a/src/libexpr/primops/fetchGit.cc
+++ b/src/libexpr/primops/fetchGit.cc
@@ -1,203 +1,19 @@
#include "primops.hh"
#include "eval-inline.hh"
-#include "download.hh"
#include "store-api.hh"
-#include "pathlocks.hh"
#include "hash.hh"
-#include "tarfile.hh"
-
-#include <sys/time.h>
-
-#include <regex>
-
-#include <nlohmann/json.hpp>
-
-using namespace std::string_literals;
+#include "fetchers.hh"
+#include "url.hh"
namespace nix {
-struct GitInfo
-{
- Path storePath;
- std::string rev;
- std::string shortRev;
- uint64_t revCount = 0;
-};
-
-std::regex revRegex("^[0-9a-fA-F]{40}$");
-
-GitInfo exportGit(ref<Store> store, const std::string & uri,
- std::optional<std::string> ref, std::string rev,
- const std::string & name)
-{
- if (evalSettings.pureEval && rev == "")
- throw Error("in pure evaluation mode, 'fetchGit' requires a Git revision");
-
- if (!ref && rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.git")) {
-
- bool clean = true;
-
- try {
- runProgram("git", true, { "-C", uri, "diff-index", "--quiet", "HEAD", "--" });
- } catch (ExecError & e) {
- if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
- clean = false;
- }
-
- if (!clean) {
-
- /* This is an unclean working tree. So copy all tracked files. */
- GitInfo gitInfo;
- gitInfo.rev = "0000000000000000000000000000000000000000";
- gitInfo.shortRev = std::string(gitInfo.rev, 0, 7);
-
- auto files = tokenizeString<std::set<std::string>>(
- runProgram("git", true, { "-C", uri, "ls-files", "-z" }), "\0"s);
-
- PathFilter filter = [&](const Path & p) -> bool {
- assert(hasPrefix(p, uri));
- std::string file(p, uri.size() + 1);
-
- auto st = lstat(p);
-
- if (S_ISDIR(st.st_mode)) {
- auto prefix = file + "/";
- auto i = files.lower_bound(prefix);
- return i != files.end() && hasPrefix(*i, prefix);
- }
-
- return files.count(file);
- };
-
- gitInfo.storePath = store->printStorePath(store->addToStore("source", uri, FileIngestionMethod::Recursive, htSHA256, filter));
-
- return gitInfo;
- }
-
- // clean working tree, but no ref or rev specified. Use 'HEAD'.
- rev = chomp(runProgram("git", true, { "-C", uri, "rev-parse", "HEAD" }));
- ref = "HEAD"s;
- }
-
- if (!ref) ref = "HEAD"s;
-
- if (rev != "" && !std::regex_match(rev, revRegex))
- throw Error("invalid Git revision '%s'", rev);
-
- deletePath(getCacheDir() + "/nix/git");
-
- Path cacheDir = getCacheDir() + "/nix/gitv2/" + hashString(htSHA256, uri).to_string(Base32, false);
-
- if (!pathExists(cacheDir)) {
- createDirs(dirOf(cacheDir));
- runProgram("git", true, { "init", "--bare", cacheDir });
- }
-
- Path localRefFile;
- if (ref->compare(0, 5, "refs/") == 0)
- localRefFile = cacheDir + "/" + *ref;
- else
- localRefFile = cacheDir + "/refs/heads/" + *ref;
-
- bool doFetch;
- time_t now = time(0);
- /* If a rev was specified, we need to fetch if it's not in the
- repo. */
- if (rev != "") {
- try {
- runProgram("git", true, { "-C", cacheDir, "cat-file", "-e", rev });
- doFetch = false;
- } catch (ExecError & e) {
- if (WIFEXITED(e.status)) {
- doFetch = true;
- } else {
- throw;
- }
- }
- } else {
- /* If the local ref is older than ‘tarball-ttl’ seconds, do a
- git fetch to update the local ref to the remote ref. */
- struct stat st;
- doFetch = stat(localRefFile.c_str(), &st) != 0 ||
- (uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now;
- }
- if (doFetch)
- {
- Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", uri));
-
- // FIXME: git stderr messes up our progress indicator, so
- // we're using --quiet for now. Should process its stderr.
- runProgram("git", true, { "-C", cacheDir, "fetch", "--quiet", "--force", "--", uri, fmt("%s:%s", *ref, *ref) });
-
- struct timeval times[2];
- times[0].tv_sec = now;
- times[0].tv_usec = 0;
- times[1].tv_sec = now;
- times[1].tv_usec = 0;
-
- utimes(localRefFile.c_str(), times);
- }
-
- // FIXME: check whether rev is an ancestor of ref.
- GitInfo gitInfo;
- gitInfo.rev = rev != "" ? rev : chomp(readFile(localRefFile));
- gitInfo.shortRev = std::string(gitInfo.rev, 0, 7);
-
- printTalkative("using revision %s of repo '%s'", gitInfo.rev, uri);
-
- std::string storeLinkName = hashString(htSHA512, name + std::string("\0"s) + gitInfo.rev).to_string(Base32, false);
- Path storeLink = cacheDir + "/" + storeLinkName + ".link";
- PathLocks storeLinkLock({storeLink}, fmt("waiting for lock on '%1%'...", storeLink)); // FIXME: broken
-
- try {
- auto json = nlohmann::json::parse(readFile(storeLink));
-
- assert(json["name"] == name && json["rev"] == gitInfo.rev);
-
- gitInfo.storePath = json["storePath"];
-
- if (store->isValidPath(store->parseStorePath(gitInfo.storePath))) {
- gitInfo.revCount = json["revCount"];
- return gitInfo;
- }
-
- } catch (SysError & e) {
- if (e.errNo != ENOENT) throw;
- }
-
- auto source = sinkToSource([&](Sink & sink) {
- RunOptions gitOptions("git", { "-C", cacheDir, "archive", gitInfo.rev });
- gitOptions.standardOut = &sink;
- runProgram2(gitOptions);
- });
-
- Path tmpDir = createTempDir();
- AutoDelete delTmpDir(tmpDir, true);
-
- unpackTarfile(*source, tmpDir);
-
- gitInfo.storePath = store->printStorePath(store->addToStore(name, tmpDir));
-
- gitInfo.revCount = std::stoull(runProgram("git", true, { "-C", cacheDir, "rev-list", "--count", gitInfo.rev }));
-
- nlohmann::json json;
- json["storePath"] = gitInfo.storePath;
- json["uri"] = uri;
- json["name"] = name;
- json["rev"] = gitInfo.rev;
- json["revCount"] = gitInfo.revCount;
-
- writeFile(storeLink, json.dump());
-
- return gitInfo;
-}
-
static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
std::string url;
std::optional<std::string> ref;
- std::string rev;
+ std::optional<Hash> rev;
std::string name = "source";
+ bool fetchSubmodules = false;
PathSet context;
state.forceValue(*args[0]);
@@ -213,9 +29,11 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
else if (n == "ref")
ref = state.forceStringNoCtx(*attr.value, *attr.pos);
else if (n == "rev")
- rev = state.forceStringNoCtx(*attr.value, *attr.pos);
+ rev = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA1);
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, *attr.pos);
+ else if (n == "submodules")
+ fetchSubmodules = state.forceBool(*attr.value, *attr.pos);
else
throw EvalError("unsupported argument '%s' to 'fetchGit', at %s", attr.name, *attr.pos);
}
@@ -230,17 +48,36 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
// whitelist. Ah well.
state.checkURI(url);
- auto gitInfo = exportGit(state.store, url, ref, rev, name);
+ if (evalSettings.pureEval && !rev)
+ throw Error("in pure evaluation mode, 'fetchGit' requires a Git revision");
+
+ fetchers::Attrs attrs;
+ attrs.insert_or_assign("type", "git");
+ attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url);
+ if (ref) attrs.insert_or_assign("ref", *ref);
+ if (rev) attrs.insert_or_assign("rev", rev->gitRev());
+ if (fetchSubmodules) attrs.insert_or_assign("submodules", true);
+ auto input = fetchers::inputFromAttrs(attrs);
+
+ // FIXME: use name?
+ auto [tree, input2] = input->fetchTree(state.store);
state.mkAttrs(v, 8);
- mkString(*state.allocAttr(v, state.sOutPath), gitInfo.storePath, PathSet({gitInfo.storePath}));
- mkString(*state.allocAttr(v, state.symbols.create("rev")), gitInfo.rev);
- mkString(*state.allocAttr(v, state.symbols.create("shortRev")), gitInfo.shortRev);
- mkInt(*state.allocAttr(v, state.symbols.create("revCount")), gitInfo.revCount);
+ auto storePath = state.store->printStorePath(tree.storePath);
+ mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
+ // Backward compatibility: set 'rev' to
+ // 0000000000000000000000000000000000000000 for a dirty tree.
+ auto rev2 = input2->getRev().value_or(Hash(htSHA1));
+ mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
+ mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev2.gitShortRev());
+ // Backward compatibility: set 'revCount' to 0 for a dirty tree.
+ mkInt(*state.allocAttr(v, state.symbols.create("revCount")),
+ tree.info.revCount.value_or(0));
+ mkBool(*state.allocAttr(v, state.symbols.create("submodules")), fetchSubmodules);
v.attrs->sort();
if (state.allowedPaths)
- state.allowedPaths->insert(state.store->toRealPath(gitInfo.storePath));
+ state.allowedPaths->insert(tree.actualPath);
}
static RegisterPrimOp r("fetchGit", 1, prim_fetchGit);
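
[Note, not part of the diff: prim_fetchGit now only assembles a fetchers::Attrs map and delegates to libfetchers. A condensed sketch of the same flow outside the evaluator, assuming only the calls that appear in this diff (inputFromAttrs, Input::fetchTree, Input::getRev); the URL and the wrapper function are hypothetical.]

    // Sketch: fetching a Git tree through libfetchers directly, mirroring prim_fetchGit above.
    #include "fetchers.hh"
    #include "store-api.hh"

    std::string fetchExampleRepo(nix::ref<nix::Store> store)
    {
        using namespace nix;

        fetchers::Attrs attrs;
        attrs.insert_or_assign("type", "git");
        attrs.insert_or_assign("url", "https://example.org/repo.git");
        attrs.insert_or_assign("ref", "master");

        auto input = fetchers::inputFromAttrs(attrs);
        auto [tree, resolvedInput] = input->fetchTree(store);

        // resolvedInput carries the revision that was actually fetched.
        return store->printStorePath(tree.storePath)
            + "@" + resolvedInput->getRev().value_or(Hash(htSHA1)).gitRev();
    }
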
diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc
index 548f4e392..0a1ba49d5 100644
--- a/src/libexpr/primops/fetchMercurial.cc
+++ b/src/libexpr/primops/fetchMercurial.cc
@@ -1,174 +1,18 @@
#include "primops.hh"
#include "eval-inline.hh"
-#include "download.hh"
#include "store-api.hh"
-#include "pathlocks.hh"
-
-#include <sys/time.h>
+#include "fetchers.hh"
+#include "url.hh"
#include <regex>
-#include <nlohmann/json.hpp>
-
-using namespace std::string_literals;
-
namespace nix {
-struct HgInfo
-{
- Path storePath;
- std::string branch;
- std::string rev;
- uint64_t revCount = 0;
-};
-
-std::regex commitHashRegex("^[0-9a-fA-F]{40}$");
-
-HgInfo exportMercurial(ref<Store> store, const std::string & uri,
- std::string rev, const std::string & name)
-{
- if (evalSettings.pureEval && rev == "")
- throw Error("in pure evaluation mode, 'fetchMercurial' requires a Mercurial revision");
-
- if (rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.hg")) {
-
- bool clean = runProgram("hg", true, { "status", "-R", uri, "--modified", "--added", "--removed" }) == "";
-
- if (!clean) {
-
- /* This is an unclean working tree. So copy all tracked
- files. */
-
- printTalkative("copying unclean Mercurial working tree '%s'", uri);
-
- HgInfo hgInfo;
- hgInfo.rev = "0000000000000000000000000000000000000000";
- hgInfo.branch = chomp(runProgram("hg", true, { "branch", "-R", uri }));
-
- auto files = tokenizeString<std::set<std::string>>(
- runProgram("hg", true, { "status", "-R", uri, "--clean", "--modified", "--added", "--no-status", "--print0" }), "\0"s);
-
- PathFilter filter = [&](const Path & p) -> bool {
- assert(hasPrefix(p, uri));
- std::string file(p, uri.size() + 1);
-
- auto st = lstat(p);
-
- if (S_ISDIR(st.st_mode)) {
- auto prefix = file + "/";
- auto i = files.lower_bound(prefix);
- return i != files.end() && hasPrefix(*i, prefix);
- }
-
- return files.count(file);
- };
-
- hgInfo.storePath = store->printStorePath(store->addToStore("source", uri, FileIngestionMethod::Recursive, htSHA256, filter));
-
- return hgInfo;
- }
- }
-
- if (rev == "") rev = "default";
-
- Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, uri).to_string(Base32, false));
-
- Path stampFile = fmt("%s/.hg/%s.stamp", cacheDir, hashString(htSHA512, rev).to_string(Base32, false));
-
- /* If we haven't pulled this repo less than ‘tarball-ttl’ seconds,
- do so now. */
- time_t now = time(0);
- struct stat st;
- if (stat(stampFile.c_str(), &st) != 0 ||
- (uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now)
- {
- /* Except that if this is a commit hash that we already have,
- we don't have to pull again. */
- if (!(std::regex_match(rev, commitHashRegex)
- && pathExists(cacheDir)
- && runProgram(
- RunOptions("hg", { "log", "-R", cacheDir, "-r", rev, "--template", "1" })
- .killStderr(true)).second == "1"))
- {
- Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", uri));
-
- if (pathExists(cacheDir)) {
- try {
- runProgram("hg", true, { "pull", "-R", cacheDir, "--", uri });
- }
- catch (ExecError & e) {
- string transJournal = cacheDir + "/.hg/store/journal";
- /* hg throws "abandoned transaction" error only if this file exists */
- if (pathExists(transJournal)) {
- runProgram("hg", true, { "recover", "-R", cacheDir });
- runProgram("hg", true, { "pull", "-R", cacheDir, "--", uri });
- } else {
- throw ExecError(e.status, fmt("'hg pull' %s", statusToString(e.status)));
- }
- }
- } else {
- createDirs(dirOf(cacheDir));
- runProgram("hg", true, { "clone", "--noupdate", "--", uri, cacheDir });
- }
- }
-
- writeFile(stampFile, "");
- }
-
- auto tokens = tokenizeString<std::vector<std::string>>(
- runProgram("hg", true, { "log", "-R", cacheDir, "-r", rev, "--template", "{node} {rev} {branch}" }));
- assert(tokens.size() == 3);
-
- HgInfo hgInfo;
- hgInfo.rev = tokens[0];
- hgInfo.revCount = std::stoull(tokens[1]);
- hgInfo.branch = tokens[2];
-
- std::string storeLinkName = hashString(htSHA512, name + std::string("\0"s) + hgInfo.rev).to_string(Base32, false);
- Path storeLink = fmt("%s/.hg/%s.link", cacheDir, storeLinkName);
-
- try {
- auto json = nlohmann::json::parse(readFile(storeLink));
-
- assert(json["name"] == name && json["rev"] == hgInfo.rev);
-
- hgInfo.storePath = json["storePath"];
-
- if (store->isValidPath(store->parseStorePath(hgInfo.storePath))) {
- printTalkative("using cached Mercurial store path '%s'", hgInfo.storePath);
- return hgInfo;
- }
-
- } catch (SysError & e) {
- if (e.errNo != ENOENT) throw;
- }
-
- Path tmpDir = createTempDir();
- AutoDelete delTmpDir(tmpDir, true);
-
- runProgram("hg", true, { "archive", "-R", cacheDir, "-r", rev, tmpDir });
-
- deletePath(tmpDir + "/.hg_archival.txt");
-
- hgInfo.storePath = store->printStorePath(store->addToStore(name, tmpDir));
-
- nlohmann::json json;
- json["storePath"] = hgInfo.storePath;
- json["uri"] = uri;
- json["name"] = name;
- json["branch"] = hgInfo.branch;
- json["rev"] = hgInfo.rev;
- json["revCount"] = hgInfo.revCount;
-
- writeFile(storeLink, json.dump());
-
- return hgInfo;
-}
-
static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
std::string url;
- std::string rev;
+ std::optional<Hash> rev;
+ std::optional<std::string> ref;
std::string name = "source";
PathSet context;
@@ -182,8 +26,15 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
string n(attr.name);
if (n == "url")
url = state.coerceToString(*attr.pos, *attr.value, context, false, false);
- else if (n == "rev")
- rev = state.forceStringNoCtx(*attr.value, *attr.pos);
+ else if (n == "rev") {
+ // Ugly: unlike fetchGit, here the "rev" attribute can
+ // be both a revision or a branch/tag name.
+ auto value = state.forceStringNoCtx(*attr.value, *attr.pos);
+ if (std::regex_match(value, revRegex))
+ rev = Hash(value, htSHA1);
+ else
+ ref = value;
+ }
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else
@@ -200,18 +51,35 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
// whitelist. Ah well.
state.checkURI(url);
- auto hgInfo = exportMercurial(state.store, url, rev, name);
+ if (evalSettings.pureEval && !rev)
+ throw Error("in pure evaluation mode, 'fetchMercurial' requires a Mercurial revision");
+
+ fetchers::Attrs attrs;
+ attrs.insert_or_assign("type", "hg");
+ attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url);
+ if (ref) attrs.insert_or_assign("ref", *ref);
+ if (rev) attrs.insert_or_assign("rev", rev->gitRev());
+ auto input = fetchers::inputFromAttrs(attrs);
+
+ // FIXME: use name
+ auto [tree, input2] = input->fetchTree(state.store);
state.mkAttrs(v, 8);
- mkString(*state.allocAttr(v, state.sOutPath), hgInfo.storePath, PathSet({hgInfo.storePath}));
- mkString(*state.allocAttr(v, state.symbols.create("branch")), hgInfo.branch);
- mkString(*state.allocAttr(v, state.symbols.create("rev")), hgInfo.rev);
- mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(hgInfo.rev, 0, 12));
- mkInt(*state.allocAttr(v, state.symbols.create("revCount")), hgInfo.revCount);
+ auto storePath = state.store->printStorePath(tree.storePath);
+ mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
+ if (input2->getRef())
+ mkString(*state.allocAttr(v, state.symbols.create("branch")), *input2->getRef());
+ // Backward compatibility: set 'rev' to
+ // 0000000000000000000000000000000000000000 for a dirty tree.
+ auto rev2 = input2->getRev().value_or(Hash(htSHA1));
+ mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
+ mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(rev2.gitRev(), 0, 12));
+ if (tree.info.revCount)
+ mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *tree.info.revCount);
v.attrs->sort();
if (state.allowedPaths)
- state.allowedPaths->insert(state.store->toRealPath(hgInfo.storePath));
+ state.allowedPaths->insert(tree.actualPath);
}
static RegisterPrimOp r("fetchMercurial", 1, prim_fetchMercurial);
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
new file mode 100644
index 000000000..c5a0d9886
--- /dev/null
+++ b/src/libexpr/primops/fetchTree.cc
@@ -0,0 +1,165 @@
+#include "primops.hh"
+#include "eval-inline.hh"
+#include "store-api.hh"
+#include "fetchers.hh"
+#include "filetransfer.hh"
+
+#include <ctime>
+#include <iomanip>
+
+namespace nix {
+
+void emitTreeAttrs(
+ EvalState & state,
+ const fetchers::Tree & tree,
+ std::shared_ptr<const fetchers::Input> input,
+ Value & v)
+{
+ state.mkAttrs(v, 8);
+
+ auto storePath = state.store->printStorePath(tree.storePath);
+
+ mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
+
+ assert(tree.info.narHash);
+ mkString(*state.allocAttr(v, state.symbols.create("narHash")),
+ tree.info.narHash.to_string(SRI));
+
+ if (input->getRev()) {
+ mkString(*state.allocAttr(v, state.symbols.create("rev")), input->getRev()->gitRev());
+ mkString(*state.allocAttr(v, state.symbols.create("shortRev")), input->getRev()->gitShortRev());
+ }
+
+ if (tree.info.revCount)
+ mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *tree.info.revCount);
+
+ if (tree.info.lastModified)
+ mkString(*state.allocAttr(v, state.symbols.create("lastModified")),
+ fmt("%s", std::put_time(std::gmtime(&*tree.info.lastModified), "%Y%m%d%H%M%S")));
+
+ v.attrs->sort();
+}
+
+static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, Value & v)
+{
+ settings.requireExperimentalFeature("flakes");
+
+ std::shared_ptr<const fetchers::Input> input;
+ PathSet context;
+
+ state.forceValue(*args[0]);
+
+ if (args[0]->type == tAttrs) {
+ state.forceAttrs(*args[0], pos);
+
+ fetchers::Attrs attrs;
+
+ for (auto & attr : *args[0]->attrs) {
+ state.forceValue(*attr.value);
+ if (attr.value->type == tString)
+ attrs.emplace(attr.name, attr.value->string.s);
+ else if (attr.value->type == tBool)
+ attrs.emplace(attr.name, attr.value->boolean);
+ else
+ throw TypeError("fetchTree argument '%s' is %s while a string or Boolean is expected",
+ attr.name, showType(*attr.value));
+ }
+
+ if (!attrs.count("type"))
+ throw Error("attribute 'type' is missing in call to 'fetchTree', at %s", pos);
+
+ input = fetchers::inputFromAttrs(attrs);
+ } else
+ input = fetchers::inputFromURL(state.coerceToString(pos, *args[0], context, false, false));
+
+ if (evalSettings.pureEval && !input->isImmutable())
+ throw Error("in pure evaluation mode, 'fetchTree' requires an immutable input");
+
+ // FIXME: use fetchOrSubstituteTree
+ auto [tree, input2] = input->fetchTree(state.store);
+
+ if (state.allowedPaths)
+ state.allowedPaths->insert(tree.actualPath);
+
+ emitTreeAttrs(state, tree, input2, v);
+}
+
+static RegisterPrimOp r("fetchTree", 1, prim_fetchTree);
+
+static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
+ const string & who, bool unpack, std::string name)
+{
+ std::optional<std::string> url;
+ std::optional<Hash> expectedHash;
+
+ state.forceValue(*args[0]);
+
+ if (args[0]->type == tAttrs) {
+
+ state.forceAttrs(*args[0], pos);
+
+ for (auto & attr : *args[0]->attrs) {
+ string n(attr.name);
+ if (n == "url")
+ url = state.forceStringNoCtx(*attr.value, *attr.pos);
+ else if (n == "sha256")
+ expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
+ else if (n == "name")
+ name = state.forceStringNoCtx(*attr.value, *attr.pos);
+ else
+ throw EvalError("unsupported argument '%s' to '%s', at %s",
+ attr.name, who, *attr.pos);
+ }
+
+ if (!url)
+ throw EvalError("'url' argument required, at %s", pos);
+
+ } else
+ url = state.forceStringNoCtx(*args[0], pos);
+
+ url = resolveUri(*url);
+
+ state.checkURI(*url);
+
+ if (name == "")
+ name = baseNameOf(*url);
+
+ if (evalSettings.pureEval && !expectedHash)
+ throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who);
+
+ auto storePath =
+ unpack
+ ? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).storePath
+ : fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
+
+ auto path = state.store->toRealPath(storePath);
+
+ if (expectedHash) {
+ auto hash = unpack
+ ? state.store->queryPathInfo(storePath)->narHash
+ : hashFile(htSHA256, path);
+ if (hash != *expectedHash)
+ throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
+ *url, expectedHash->to_string(), hash.to_string());
+ }
+
+ if (state.allowedPaths)
+ state.allowedPaths->insert(path);
+
+ mkString(v, path, PathSet({path}));
+}
+
+static void prim_fetchurl(EvalState & state, const Pos & pos, Value * * args, Value & v)
+{
+ fetch(state, pos, args, v, "fetchurl", false, "");
+}
+
+static void prim_fetchTarball(EvalState & state, const Pos & pos, Value * * args, Value & v)
+{
+ fetch(state, pos, args, v, "fetchTarball", true, "source");
+}
+
+static RegisterPrimOp r2("__fetchurl", 1, prim_fetchurl);
+static RegisterPrimOp r3("fetchTarball", 1, prim_fetchTarball);
+
+}
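
[Note, not part of the diff: the shared fetch() helper above verifies the 'sha256' argument differently depending on whether the result was unpacked, comparing against the store path's NAR hash for fetchTarball and against a flat file hash for fetchurl. A small sketch of just that check; the wrapper function is hypothetical, the calls (queryPathInfo, hashFile) are the ones used in the hunk.]

    // Sketch of the hash verification performed by fetch() above.
    void checkFetchedHash(nix::EvalState & state, const nix::StorePath & storePath,
        const std::string & path, bool unpack, const nix::Hash & expectedHash,
        const std::string & url)
    {
        using namespace nix;
        auto hash = unpack
            ? state.store->queryPathInfo(storePath)->narHash   // hash of the unpacked NAR
            : hashFile(htSHA256, path);                        // flat hash of the downloaded file
        if (hash != expectedHash)
            throw Error("hash mismatch in file downloaded from '%s':\n  wanted: %s\n  got:    %s",
                url, expectedHash.to_string(), hash.to_string());
    }
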
diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh
index 689373873..71025824e 100644
--- a/src/libexpr/value.hh
+++ b/src/libexpr/value.hh
@@ -253,12 +253,17 @@ void mkPath(Value & v, const char * s);
#if HAVE_BOEHMGC
-typedef std::vector<Value *, gc_allocator<Value *> > ValueVector;
-typedef std::map<Symbol, Value *, std::less<Symbol>, gc_allocator<std::pair<const Symbol, Value *> > > ValueMap;
+typedef std::vector<Value *, traceable_allocator<Value *> > ValueVector;
+typedef std::map<Symbol, Value *, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, Value *> > > ValueMap;
#else
typedef std::vector<Value *> ValueVector;
typedef std::map<Symbol, Value *> ValueMap;
#endif
+/* A value allocated in traceable memory. */
+typedef std::shared_ptr<Value *> RootValue;
+
+RootValue allocRootValue(Value * v);
+
}
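
[Note, not part of the diff: RootValue is a shared_ptr to a Value* allocated with a GC-traceable allocator (see allocRootValue() in the eval.cc hunk earlier), so the Boehm collector can still see the pointer when it is stored in a non-GC-aware container such as the JSON parser state above. A minimal usage sketch, assuming an EvalState named state:]

    // Sketch: keeping an evaluator Value alive across allocations by rooting it.
    RootValue root = allocRootValue(state.allocValue());   // pointer now visible to the GC
    mkInt(**root, 42);                                      // use the rooted value
    // ... later allocations may trigger a collection; *root stays live ...
    Value & v = **root;
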
diff --git a/src/libfetchers/attrs.cc b/src/libfetchers/attrs.cc
new file mode 100644
index 000000000..feb0a6085
--- /dev/null
+++ b/src/libfetchers/attrs.cc
@@ -0,0 +1,107 @@
+#include "attrs.hh"
+#include "fetchers.hh"
+
+#include <nlohmann/json.hpp>
+
+namespace nix::fetchers {
+
+Attrs jsonToAttrs(const nlohmann::json & json)
+{
+ Attrs attrs;
+
+ for (auto & i : json.items()) {
+ if (i.value().is_number())
+ attrs.emplace(i.key(), i.value().get<int64_t>());
+ else if (i.value().is_string())
+ attrs.emplace(i.key(), i.value().get<std::string>());
+ else if (i.value().is_boolean())
+ attrs.emplace(i.key(), i.value().get<bool>());
+ else
+ throw Error("unsupported input attribute type in lock file");
+ }
+
+ return attrs;
+}
+
+nlohmann::json attrsToJson(const Attrs & attrs)
+{
+ nlohmann::json json;
+ for (auto & attr : attrs) {
+ if (auto v = std::get_if<int64_t>(&attr.second)) {
+ json[attr.first] = *v;
+ } else if (auto v = std::get_if<std::string>(&attr.second)) {
+ json[attr.first] = *v;
+ } else if (auto v = std::get_if<Explicit<bool>>(&attr.second)) {
+ json[attr.first] = v->t;
+ } else abort();
+ }
+ return json;
+}
+
+std::optional<std::string> maybeGetStrAttr(const Attrs & attrs, const std::string & name)
+{
+ auto i = attrs.find(name);
+ if (i == attrs.end()) return {};
+ if (auto v = std::get_if<std::string>(&i->second))
+ return *v;
+ throw Error("input attribute '%s' is not a string %s", name, attrsToJson(attrs).dump());
+}
+
+std::string getStrAttr(const Attrs & attrs, const std::string & name)
+{
+ auto s = maybeGetStrAttr(attrs, name);
+ if (!s)
+ throw Error("input attribute '%s' is missing", name);
+ return *s;
+}
+
+std::optional<int64_t> maybeGetIntAttr(const Attrs & attrs, const std::string & name)
+{
+ auto i = attrs.find(name);
+ if (i == attrs.end()) return {};
+ if (auto v = std::get_if<int64_t>(&i->second))
+ return *v;
+ throw Error("input attribute '%s' is not an integer", name);
+}
+
+int64_t getIntAttr(const Attrs & attrs, const std::string & name)
+{
+ auto s = maybeGetIntAttr(attrs, name);
+ if (!s)
+ throw Error("input attribute '%s' is missing", name);
+ return *s;
+}
+
+std::optional<bool> maybeGetBoolAttr(const Attrs & attrs, const std::string & name)
+{
+ auto i = attrs.find(name);
+ if (i == attrs.end()) return {};
+ if (auto v = std::get_if<int64_t>(&i->second))
+ return *v;
+ throw Error("input attribute '%s' is not a Boolean", name);
+}
+
+bool getBoolAttr(const Attrs & attrs, const std::string & name)
+{
+ auto s = maybeGetBoolAttr(attrs, name);
+ if (!s)
+ throw Error("input attribute '%s' is missing", name);
+ return *s;
+}
+
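+/* Flatten an Attrs map into URL query parameters; Booleans become "1"/"0". */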
+std::map<std::string, std::string> attrsToQuery(const Attrs & attrs)
+{
+ std::map<std::string, std::string> query;
+ for (auto & attr : attrs) {
+ if (auto v = std::get_if<int64_t>(&attr.second)) {
+ query.insert_or_assign(attr.first, fmt("%d", *v));
+ } else if (auto v = std::get_if<std::string>(&attr.second)) {
+ query.insert_or_assign(attr.first, *v);
+ } else if (auto v = std::get_if<Explicit<bool>>(&attr.second)) {
+ query.insert_or_assign(attr.first, v->t ? "1" : "0");
+ } else abort();
+ }
+ return query;
+}
+
+}
diff --git a/src/libfetchers/attrs.hh b/src/libfetchers/attrs.hh
new file mode 100644
index 000000000..d6e0ae000
--- /dev/null
+++ b/src/libfetchers/attrs.hh
@@ -0,0 +1,39 @@
+#pragma once
+
+#include "types.hh"
+
+#include <variant>
+
+#include <nlohmann/json_fwd.hpp>
+
+namespace nix::fetchers {
+
+/* Wrap bools to prevent string literals (i.e. 'char *') from being
+ cast to a bool in Attr. */
+template<typename T>
+struct Explicit {
+ T t;
+};
+
+typedef std::variant<std::string, int64_t, Explicit<bool>> Attr;
+typedef std::map<std::string, Attr> Attrs;
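+/* An Attrs map associates attribute names with strings, integers or
+ Booleans, e.g. {"type": "git", "url": "...", "shallow": true} in its
+ JSON representation (see jsonToAttrs/attrsToJson below). */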
+
+Attrs jsonToAttrs(const nlohmann::json & json);
+
+nlohmann::json attrsToJson(const Attrs & attrs);
+
+std::optional<std::string> maybeGetStrAttr(const Attrs & attrs, const std::string & name);
+
+std::string getStrAttr(const Attrs & attrs, const std::string & name);
+
+std::optional<int64_t> maybeGetIntAttr(const Attrs & attrs, const std::string & name);
+
+int64_t getIntAttr(const Attrs & attrs, const std::string & name);
+
+std::optional<bool> maybeGetBoolAttr(const Attrs & attrs, const std::string & name);
+
+bool getBoolAttr(const Attrs & attrs, const std::string & name);
+
+std::map<std::string, std::string> attrsToQuery(const Attrs & attrs);
+
+}
diff --git a/src/libfetchers/cache.cc b/src/libfetchers/cache.cc
new file mode 100644
index 000000000..e1c7f3dee
--- /dev/null
+++ b/src/libfetchers/cache.cc
@@ -0,0 +1,121 @@
+#include "cache.hh"
+#include "sqlite.hh"
+#include "sync.hh"
+#include "store-api.hh"
+
+#include <nlohmann/json.hpp>
+
+namespace nix::fetchers {
+
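+/* A small SQLite database that maps an input (serialised as JSON) to the
+ store path it was fetched to, plus result metadata, an 'immutable' flag
+ and the fetch timestamp used for expiry. */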
+static const char * schema = R"sql(
+
+create table if not exists Cache (
+ input text not null,
+ info text not null,
+ path text not null,
+ immutable integer not null,
+ timestamp integer not null,
+ primary key (input)
+);
+)sql";
+
+struct CacheImpl : Cache
+{
+ struct State
+ {
+ SQLite db;
+ SQLiteStmt add, lookup;
+ };
+
+ Sync<State> _state;
+
+ CacheImpl()
+ {
+ auto state(_state.lock());
+
+ auto dbPath = getCacheDir() + "/nix/fetcher-cache-v1.sqlite";
+ createDirs(dirOf(dbPath));
+
+ state->db = SQLite(dbPath);
+ state->db.isCache();
+ state->db.exec(schema);
+
+ state->add.create(state->db,
+ "insert or replace into Cache(input, info, path, immutable, timestamp) values (?, ?, ?, ?, ?)");
+
+ state->lookup.create(state->db,
+ "select info, path, immutable, timestamp from Cache where input = ?");
+ }
+
+ void add(
+ ref<Store> store,
+ const Attrs & inAttrs,
+ const Attrs & infoAttrs,
+ const StorePath & storePath,
+ bool immutable) override
+ {
+ _state.lock()->add.use()
+ (attrsToJson(inAttrs).dump())
+ (attrsToJson(infoAttrs).dump())
+ (store->printStorePath(storePath))
+ (immutable)
+ (time(0)).exec();
+ }
+
+ std::optional<std::pair<Attrs, StorePath>> lookup(
+ ref<Store> store,
+ const Attrs & inAttrs) override
+ {
+ if (auto res = lookupExpired(store, inAttrs)) {
+ if (!res->expired)
+ return std::make_pair(std::move(res->infoAttrs), std::move(res->storePath));
+ debug("ignoring expired cache entry '%s'",
+ attrsToJson(inAttrs).dump());
+ }
+ return {};
+ }
+
+ std::optional<Result> lookupExpired(
+ ref<Store> store,
+ const Attrs & inAttrs) override
+ {
+ auto state(_state.lock());
+
+ auto inAttrsJson = attrsToJson(inAttrs).dump();
+
+ auto stmt(state->lookup.use()(inAttrsJson));
+ if (!stmt.next()) {
+ debug("did not find cache entry for '%s'", inAttrsJson);
+ return {};
+ }
+
+ auto infoJson = stmt.getStr(0);
+ auto storePath = store->parseStorePath(stmt.getStr(1));
+ auto immutable = stmt.getInt(2) != 0;
+ auto timestamp = stmt.getInt(3);
+
+ store->addTempRoot(storePath);
+ if (!store->isValidPath(storePath)) {
+ // FIXME: we could try to substitute 'storePath'.
+ debug("ignoring disappeared cache entry '%s'", inAttrsJson);
+ return {};
+ }
+
+ debug("using cache entry '%s' -> '%s', '%s'",
+ inAttrsJson, infoJson, store->printStorePath(storePath));
+
+ return Result {
+ .expired = !immutable && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)),
+ .infoAttrs = jsonToAttrs(nlohmann::json::parse(infoJson)),
+ .storePath = std::move(storePath)
+ };
+ }
+};
+
+ref<Cache> getCache()
+{
+ static auto cache = std::make_shared<CacheImpl>();
+ return ref<Cache>(cache);
+}
+
+}
diff --git a/src/libfetchers/cache.hh b/src/libfetchers/cache.hh
new file mode 100644
index 000000000..d76ab1233
--- /dev/null
+++ b/src/libfetchers/cache.hh
@@ -0,0 +1,34 @@
+#pragma once
+
+#include "fetchers.hh"
+
+namespace nix::fetchers {
+
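+/* Persistent cache of fetched inputs. add() records a fetch result;
+ lookup() returns only unexpired entries, while lookupExpired() also
+ returns expired ones together with an 'expired' flag. */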
+struct Cache
+{
+ virtual void add(
+ ref<Store> store,
+ const Attrs & inAttrs,
+ const Attrs & infoAttrs,
+ const StorePath & storePath,
+ bool immutable) = 0;
+
+ virtual std::optional<std::pair<Attrs, StorePath>> lookup(
+ ref<Store> store,
+ const Attrs & inAttrs) = 0;
+
+ struct Result
+ {
+ bool expired = false;
+ Attrs infoAttrs;
+ StorePath storePath;
+ };
+
+ virtual std::optional<Result> lookupExpired(
+ ref<Store> store,
+ const Attrs & inAttrs) = 0;
+};
+
+ref<Cache> getCache();
+
+}
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
new file mode 100644
index 000000000..94ac30e38
--- /dev/null
+++ b/src/libfetchers/fetchers.cc
@@ -0,0 +1,75 @@
+#include "fetchers.hh"
+#include "store-api.hh"
+
+#include <nlohmann/json.hpp>
+
+namespace nix::fetchers {
+
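+/* Input schemes register themselves at startup; see the OnStartup calls in
+ git.cc, github.cc, mercurial.cc, path.cc and tarball.cc. */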
+std::unique_ptr<std::vector<std::unique_ptr<InputScheme>>> inputSchemes = nullptr;
+
+void registerInputScheme(std::unique_ptr<InputScheme> && inputScheme)
+{
+ if (!inputSchemes) inputSchemes = std::make_unique<std::vector<std::unique_ptr<InputScheme>>>();
+ inputSchemes->push_back(std::move(inputScheme));
+}
+
+std::unique_ptr<Input> inputFromURL(const ParsedURL & url)
+{
+ for (auto & inputScheme : *inputSchemes) {
+ auto res = inputScheme->inputFromURL(url);
+ if (res) return res;
+ }
+ throw Error("input '%s' is unsupported", url.url);
+}
+
+std::unique_ptr<Input> inputFromURL(const std::string & url)
+{
+ return inputFromURL(parseURL(url));
+}
+
+std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs)
+{
+ auto attrs2(attrs);
+ attrs2.erase("narHash");
+ for (auto & inputScheme : *inputSchemes) {
+ auto res = inputScheme->inputFromAttrs(attrs2);
+ if (res) {
+ if (auto narHash = maybeGetStrAttr(attrs, "narHash"))
+ // FIXME: require SRI hash.
+ res->narHash = Hash(*narHash);
+ return res;
+ }
+ }
+ throw Error("input '%s' is unsupported", attrsToJson(attrs));
+}
+
+Attrs Input::toAttrs() const
+{
+ auto attrs = toAttrsInternal();
+ if (narHash)
+ attrs.emplace("narHash", narHash->to_string(SRI));
+ attrs.emplace("type", type());
+ return attrs;
+}
+
+std::pair<Tree, std::shared_ptr<const Input>> Input::fetchTree(ref<Store> store) const
+{
+ auto [tree, input] = fetchTreeInternal(store);
+
+ if (tree.actualPath == "")
+ tree.actualPath = store->toRealPath(tree.storePath);
+
+ if (!tree.info.narHash)
+ tree.info.narHash = store->queryPathInfo(tree.storePath)->narHash;
+
+ if (input->narHash)
+ assert(input->narHash == tree.info.narHash);
+
+ if (narHash && narHash != input->narHash)
+ throw Error("NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
+ to_string(), tree.actualPath, narHash->to_string(SRI), input->narHash->to_string(SRI));
+
+ return {std::move(tree), input};
+}
+
+}
diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh
new file mode 100644
index 000000000..59a58ae67
--- /dev/null
+++ b/src/libfetchers/fetchers.hh
@@ -0,0 +1,103 @@
+#pragma once
+
+#include "types.hh"
+#include "hash.hh"
+#include "path.hh"
+#include "tree-info.hh"
+#include "attrs.hh"
+#include "url.hh"
+
+#include <memory>
+
+namespace nix { class Store; }
+
+namespace nix::fetchers {
+
+struct Input;
+
+struct Tree
+{
+ Path actualPath;
+ StorePath storePath;
+ TreeInfo info;
+};
+
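+/* An Input specifies a source tree that can be fetched into the Nix store,
+ e.g. a Git, GitHub or Mercurial repository, a local path or a tarball URL.
+ Concrete subclasses live in git.cc, github.cc, mercurial.cc, path.cc and
+ tarball.cc. */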
+struct Input : std::enable_shared_from_this<Input>
+{
+ std::optional<Hash> narHash; // FIXME: implement
+
+ virtual std::string type() const = 0;
+
+ virtual ~Input() { }
+
+ virtual bool operator ==(const Input & other) const { return false; }
+
+ /* Check whether this is a "direct" input, that is, not
+ one that goes through a registry. */
+ virtual bool isDirect() const { return true; }
+
+ /* Check whether this is an "immutable" input, that is,
+ one that contains a commit hash or content hash. */
+ virtual bool isImmutable() const { return (bool) narHash; }
+
+ virtual bool contains(const Input & other) const { return false; }
+
+ virtual std::optional<std::string> getRef() const { return {}; }
+
+ virtual std::optional<Hash> getRev() const { return {}; }
+
+ virtual ParsedURL toURL() const = 0;
+
+ std::string to_string() const
+ {
+ return toURL().to_string();
+ }
+
+ Attrs toAttrs() const;
+
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTree(ref<Store> store) const;
+
+private:
+
+ virtual std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(ref<Store> store) const = 0;
+
+ virtual Attrs toAttrsInternal() const = 0;
+};
+
+struct InputScheme
+{
+ virtual ~InputScheme() { }
+
+ virtual std::unique_ptr<Input> inputFromURL(const ParsedURL & url) = 0;
+
+ virtual std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) = 0;
+};
+
+std::unique_ptr<Input> inputFromURL(const ParsedURL & url);
+
+std::unique_ptr<Input> inputFromURL(const std::string & url);
+
+std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs);
+
+void registerInputScheme(std::unique_ptr<InputScheme> && fetcher);
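+
+/* Illustrative usage of the functions above (the URL is hypothetical):
+ auto input = inputFromURL("git+https://example.org/repo.git?ref=master");
+ auto [tree, lockedInput] = input->fetchTree(store);
+ fetchTree() copies the source into the store and returns the tree together
+ with a refined copy of the input (e.g. with the revision filled in). */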
+
+struct DownloadFileResult
+{
+ StorePath storePath;
+ std::string etag;
+ std::string effectiveUrl;
+};
+
+DownloadFileResult downloadFile(
+ ref<Store> store,
+ const std::string & url,
+ const std::string & name,
+ bool immutable);
+
+Tree downloadTarball(
+ ref<Store> store,
+ const std::string & url,
+ const std::string & name,
+ bool immutable);
+
+}
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
new file mode 100644
index 000000000..7c18cf67f
--- /dev/null
+++ b/src/libfetchers/git.cc
@@ -0,0 +1,438 @@
+#include "fetchers.hh"
+#include "cache.hh"
+#include "globals.hh"
+#include "tarfile.hh"
+#include "store-api.hh"
+
+#include <sys/time.h>
+
+using namespace std::string_literals;
+
+namespace nix::fetchers {
+
+static std::string readHead(const Path & path)
+{
+ return chomp(runProgram("git", true, { "-C", path, "rev-parse", "--abbrev-ref", "HEAD" }));
+}
+
+static bool isNotDotGitDirectory(const Path & path)
+{
+ static const std::regex gitDirRegex("^(?:.*/)?\\.git$");
+
+ return not std::regex_match(path, gitDirRegex);
+}
+
+struct GitInput : Input
+{
+ ParsedURL url;
+ std::optional<std::string> ref;
+ std::optional<Hash> rev;
+ bool shallow = false;
+ bool submodules = false;
+
+ GitInput(const ParsedURL & url) : url(url)
+ { }
+
+ std::string type() const override { return "git"; }
+
+ bool operator ==(const Input & other) const override
+ {
+ auto other2 = dynamic_cast<const GitInput *>(&other);
+ return
+ other2
+ && url == other2->url
+ && rev == other2->rev
+ && ref == other2->ref;
+ }
+
+ bool isImmutable() const override
+ {
+ return (bool) rev || narHash;
+ }
+
+ std::optional<std::string> getRef() const override { return ref; }
+
+ std::optional<Hash> getRev() const override { return rev; }
+
+ ParsedURL toURL() const override
+ {
+ ParsedURL url2(url);
+ if (url2.scheme != "git") url2.scheme = "git+" + url2.scheme;
+ if (rev) url2.query.insert_or_assign("rev", rev->gitRev());
+ if (ref) url2.query.insert_or_assign("ref", *ref);
+ if (shallow) url2.query.insert_or_assign("shallow", "1");
+ return url2;
+ }
+
+ Attrs toAttrsInternal() const override
+ {
+ Attrs attrs;
+ attrs.emplace("url", url.to_string());
+ if (ref)
+ attrs.emplace("ref", *ref);
+ if (rev)
+ attrs.emplace("rev", rev->gitRev());
+ if (shallow)
+ attrs.emplace("shallow", true);
+ if (submodules)
+ attrs.emplace("submodules", true);
+ return attrs;
+ }
+
+ std::pair<bool, std::string> getActualUrl() const
+ {
+ // Don't clone file:// URIs (but otherwise treat them the
+ // same as remote URIs, i.e. don't use the working tree or
+ // HEAD).
+ static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing
+ bool isLocal = url.scheme == "file" && !forceHttp;
+ return {isLocal, isLocal ? url.path : url.base};
+ }
+
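+ /* Fetch the repository: look up the revision in the cache first; for a
+ dirty local checkout copy the tracked files directly; otherwise
+ resolve the ref (via the local repo or a bare clone under
+ <cache-dir>/nix/gitv3), export the revision with 'git archive' (or a
+ checkout when submodules are enabled) and add it to the store. */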
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+ {
+ auto name = "source";
+
+ auto input = std::make_shared<GitInput>(*this);
+
+ assert(!rev || rev->type == htSHA1);
+
+ std::string cacheType = "git";
+ if (shallow) cacheType += "-shallow";
+ if (submodules) cacheType += "-submodules";
+
+ auto getImmutableAttrs = [&]()
+ {
+ return Attrs({
+ {"type", cacheType},
+ {"name", name},
+ {"rev", input->rev->gitRev()},
+ });
+ };
+
+ auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
+ -> std::pair<Tree, std::shared_ptr<const Input>>
+ {
+ assert(input->rev);
+ assert(!rev || rev == input->rev);
+ return {
+ Tree {
+ .actualPath = store->toRealPath(storePath),
+ .storePath = std::move(storePath),
+ .info = TreeInfo {
+ .revCount = shallow ? std::nullopt : std::optional(getIntAttr(infoAttrs, "revCount")),
+ .lastModified = getIntAttr(infoAttrs, "lastModified"),
+ },
+ },
+ input
+ };
+ };
+
+ if (rev) {
+ if (auto res = getCache()->lookup(store, getImmutableAttrs()))
+ return makeResult(res->first, std::move(res->second));
+ }
+
+ auto [isLocal, actualUrl_] = getActualUrl();
+ auto actualUrl = actualUrl_; // work around clang bug
+
+ // If this is a local directory and no ref or revision is
+ // given, then allow the use of an unclean working tree.
+ if (!input->ref && !input->rev && isLocal) {
+ bool clean = false;
+
+ /* Check whether this repo has any commits. There are
+ probably better ways to do this. */
+ auto gitDir = actualUrl + "/.git";
+ auto commonGitDir = chomp(runProgram(
+ "git",
+ true,
+ { "-C", actualUrl, "rev-parse", "--git-common-dir" }
+ ));
+ if (commonGitDir != ".git")
+ gitDir = commonGitDir;
+
+ bool haveCommits = !readDirectory(gitDir + "/refs/heads").empty();
+
+ try {
+ if (haveCommits) {
+ runProgram("git", true, { "-C", actualUrl, "diff-index", "--quiet", "HEAD", "--" });
+ clean = true;
+ }
+ } catch (ExecError & e) {
+ if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
+ }
+
+ if (!clean) {
+
+ /* This is an unclean working tree. So copy all tracked files. */
+
+ if (!settings.allowDirty)
+ throw Error("Git tree '%s' is dirty", actualUrl);
+
+ if (settings.warnDirty)
+ warn("Git tree '%s' is dirty", actualUrl);
+
+ auto gitOpts = Strings({ "-C", actualUrl, "ls-files", "-z" });
+ if (submodules)
+ gitOpts.emplace_back("--recurse-submodules");
+
+ auto files = tokenizeString<std::set<std::string>>(
+ runProgram("git", true, gitOpts), "\0"s);
+
+ PathFilter filter = [&](const Path & p) -> bool {
+ assert(hasPrefix(p, actualUrl));
+ std::string file(p, actualUrl.size() + 1);
+
+ auto st = lstat(p);
+
+ if (S_ISDIR(st.st_mode)) {
+ auto prefix = file + "/";
+ auto i = files.lower_bound(prefix);
+ return i != files.end() && hasPrefix(*i, prefix);
+ }
+
+ return files.count(file);
+ };
+
+ auto storePath = store->addToStore("source", actualUrl, true, htSHA256, filter);
+
+ auto tree = Tree {
+ .actualPath = store->printStorePath(storePath),
+ .storePath = std::move(storePath),
+ .info = TreeInfo {
+ // FIXME: maybe we should use the timestamp of the last
+ // modified dirty file?
+ .lastModified = haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "HEAD" })) : 0,
+ }
+ };
+
+ return {std::move(tree), input};
+ }
+ }
+
+ if (!input->ref) input->ref = isLocal ? readHead(actualUrl) : "master";
+
+ Attrs mutableAttrs({
+ {"type", cacheType},
+ {"name", name},
+ {"url", actualUrl},
+ {"ref", *input->ref},
+ });
+
+ Path repoDir;
+
+ if (isLocal) {
+
+ if (!input->rev)
+ input->rev = Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input->ref })), htSHA1);
+
+ repoDir = actualUrl;
+
+ } else {
+
+ if (auto res = getCache()->lookup(store, mutableAttrs)) {
+ auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1);
+ if (!rev || rev == rev2) {
+ input->rev = rev2;
+ return makeResult(res->first, std::move(res->second));
+ }
+ }
+
+ Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, actualUrl).to_string(Base32, false);
+ repoDir = cacheDir;
+
+ if (!pathExists(cacheDir)) {
+ createDirs(dirOf(cacheDir));
+ runProgram("git", true, { "init", "--bare", repoDir });
+ }
+
+ Path localRefFile =
+ input->ref->compare(0, 5, "refs/") == 0
+ ? cacheDir + "/" + *input->ref
+ : cacheDir + "/refs/heads/" + *input->ref;
+
+ bool doFetch;
+ time_t now = time(0);
+
+ /* If a rev was specified, we need to fetch if it's not in the
+ repo. */
+ if (input->rev) {
+ try {
+ runProgram("git", true, { "-C", repoDir, "cat-file", "-e", input->rev->gitRev() });
+ doFetch = false;
+ } catch (ExecError & e) {
+ if (WIFEXITED(e.status)) {
+ doFetch = true;
+ } else {
+ throw;
+ }
+ }
+ } else {
+ /* If the local ref is older than ‘tarball-ttl’ seconds, do a
+ git fetch to update the local ref to the remote ref. */
+ struct stat st;
+ doFetch = stat(localRefFile.c_str(), &st) != 0 ||
+ (uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now;
+ }
+
+ if (doFetch) {
+ Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", actualUrl));
+
+ // FIXME: git stderr messes up our progress indicator, so
+ // we're using --quiet for now. Should process its stderr.
+ try {
+ runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", *input->ref, *input->ref) });
+ } catch (Error & e) {
+ if (!pathExists(localRefFile)) throw;
+ warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl);
+ }
+
+ struct timeval times[2];
+ times[0].tv_sec = now;
+ times[0].tv_usec = 0;
+ times[1].tv_sec = now;
+ times[1].tv_usec = 0;
+
+ utimes(localRefFile.c_str(), times);
+ }
+
+ if (!input->rev)
+ input->rev = Hash(chomp(readFile(localRefFile)), htSHA1);
+ }
+
+ bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "rev-parse", "--is-shallow-repository" })) == "true";
+
+ if (isShallow && !shallow)
+ throw Error("'%s' is a shallow Git repository, but a non-shallow repository is needed", actualUrl);
+
+ // FIXME: check whether rev is an ancestor of ref.
+
+ printTalkative("using revision %s of repo '%s'", input->rev->gitRev(), actualUrl);
+
+ /* Now that we know the revision, check again whether we have it in
+ the store. */
+ if (auto res = getCache()->lookup(store, getImmutableAttrs()))
+ return makeResult(res->first, std::move(res->second));
+
+ Path tmpDir = createTempDir();
+ AutoDelete delTmpDir(tmpDir, true);
+ PathFilter filter = defaultPathFilter;
+
+ if (submodules) {
+ Path tmpGitDir = createTempDir();
+ AutoDelete delTmpGitDir(tmpGitDir, true);
+
+ runProgram("git", true, { "init", tmpDir, "--separate-git-dir", tmpGitDir });
+ // TODO: repoDir might lack the ref (it only checks if rev
+ // exists, see FIXME above) so use a big hammer and fetch
+ // everything to ensure we get the rev.
+ runProgram("git", true, { "-C", tmpDir, "fetch", "--quiet", "--force",
+ "--update-head-ok", "--", repoDir, "refs/*:refs/*" });
+
+ runProgram("git", true, { "-C", tmpDir, "checkout", "--quiet", input->rev->gitRev() });
+ runProgram("git", true, { "-C", tmpDir, "remote", "add", "origin", actualUrl });
+ runProgram("git", true, { "-C", tmpDir, "submodule", "--quiet", "update", "--init", "--recursive" });
+
+ filter = isNotDotGitDirectory;
+ } else {
+ // FIXME: should pipe this, or find some better way to extract a
+ // revision.
+ auto source = sinkToSource([&](Sink & sink) {
+ RunOptions gitOptions("git", { "-C", repoDir, "archive", input->rev->gitRev() });
+ gitOptions.standardOut = &sink;
+ runProgram2(gitOptions);
+ });
+
+ unpackTarfile(*source, tmpDir);
+ }
+
+ auto storePath = store->addToStore(name, tmpDir, true, htSHA256, filter);
+
+ auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input->rev->gitRev() }));
+
+ Attrs infoAttrs({
+ {"rev", input->rev->gitRev()},
+ {"lastModified", lastModified},
+ });
+
+ if (!shallow)
+ infoAttrs.insert_or_assign("revCount",
+ std::stoull(runProgram("git", true, { "-C", repoDir, "rev-list", "--count", input->rev->gitRev() })));
+
+ if (!this->rev)
+ getCache()->add(
+ store,
+ mutableAttrs,
+ infoAttrs,
+ storePath,
+ false);
+
+ getCache()->add(
+ store,
+ getImmutableAttrs(),
+ infoAttrs,
+ storePath,
+ true);
+
+ return makeResult(infoAttrs, std::move(storePath));
+ }
+};
+
+struct GitInputScheme : InputScheme
+{
+ std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
+ {
+ if (url.scheme != "git" &&
+ url.scheme != "git+http" &&
+ url.scheme != "git+https" &&
+ url.scheme != "git+ssh" &&
+ url.scheme != "git+file") return nullptr;
+
+ auto url2(url);
+ if (hasPrefix(url2.scheme, "git+")) url2.scheme = std::string(url2.scheme, 4);
+ url2.query.clear();
+
+ Attrs attrs;
+ attrs.emplace("type", "git");
+
+ for (auto &[name, value] : url.query) {
+ if (name == "rev" || name == "ref")
+ attrs.emplace(name, value);
+ else
+ url2.query.emplace(name, value);
+ }
+
+ attrs.emplace("url", url2.to_string());
+
+ return inputFromAttrs(attrs);
+ }
+
+ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
+ {
+ if (maybeGetStrAttr(attrs, "type") != "git") return {};
+
+ for (auto & [name, value] : attrs)
+ if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules")
+ throw Error("unsupported Git input attribute '%s'", name);
+
+ auto input = std::make_unique<GitInput>(parseURL(getStrAttr(attrs, "url")));
+ if (auto ref = maybeGetStrAttr(attrs, "ref")) {
+ if (!std::regex_match(*ref, refRegex))
+ throw BadURL("invalid Git branch/tag name '%s'", *ref);
+ input->ref = *ref;
+ }
+ if (auto rev = maybeGetStrAttr(attrs, "rev"))
+ input->rev = Hash(*rev, htSHA1);
+
+ input->shallow = maybeGetBoolAttr(attrs, "shallow").value_or(false);
+
+ input->submodules = maybeGetBoolAttr(attrs, "submodules").value_or(false);
+
+ return input;
+ }
+};
+
+static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitInputScheme>()); });
+
+}
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
new file mode 100644
index 000000000..8675a5a66
--- /dev/null
+++ b/src/libfetchers/github.cc
@@ -0,0 +1,195 @@
+#include "filetransfer.hh"
+#include "cache.hh"
+#include "fetchers.hh"
+#include "globals.hh"
+#include "store-api.hh"
+
+#include <nlohmann/json.hpp>
+
+namespace nix::fetchers {
+
+std::regex ownerRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
+std::regex repoRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
+
+struct GitHubInput : Input
+{
+ std::string owner;
+ std::string repo;
+ std::optional<std::string> ref;
+ std::optional<Hash> rev;
+
+ std::string type() const override { return "github"; }
+
+ bool operator ==(const Input & other) const override
+ {
+ auto other2 = dynamic_cast<const GitHubInput *>(&other);
+ return
+ other2
+ && owner == other2->owner
+ && repo == other2->repo
+ && rev == other2->rev
+ && ref == other2->ref;
+ }
+
+ bool isImmutable() const override
+ {
+ return (bool) rev || narHash;
+ }
+
+ std::optional<std::string> getRef() const override { return ref; }
+
+ std::optional<Hash> getRev() const override { return rev; }
+
+ ParsedURL toURL() const override
+ {
+ auto path = owner + "/" + repo;
+ assert(!(ref && rev));
+ if (ref) path += "/" + *ref;
+ if (rev) path += "/" + rev->to_string(Base16, false);
+ return ParsedURL {
+ .scheme = "github",
+ .path = path,
+ };
+ }
+
+ Attrs toAttrsInternal() const override
+ {
+ Attrs attrs;
+ attrs.emplace("owner", owner);
+ attrs.emplace("repo", repo);
+ if (ref)
+ attrs.emplace("ref", *ref);
+ if (rev)
+ attrs.emplace("rev", rev->gitRev());
+ return attrs;
+ }
+
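+ /* Fetch a GitHub repository: if no revision is given, resolve the ref to
+ a commit hash via the GitHub commits API, then download the tarball
+ from api.github.com and cache the unpacked tree keyed on that
+ revision. */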
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+ {
+ auto rev = this->rev;
+ auto ref = this->ref.value_or("master");
+
+ if (!rev) {
+ auto url = fmt("https://api.github.com/repos/%s/%s/commits/%s",
+ owner, repo, ref);
+ auto json = nlohmann::json::parse(
+ readFile(
+ store->toRealPath(
+ downloadFile(store, url, "source", false).storePath)));
+ rev = Hash(json["sha"], htSHA1);
+ debug("HEAD revision for '%s' is %s", url, rev->gitRev());
+ }
+
+ auto input = std::make_shared<GitHubInput>(*this);
+ input->ref = {};
+ input->rev = *rev;
+
+ Attrs immutableAttrs({
+ {"type", "git-tarball"},
+ {"rev", rev->gitRev()},
+ });
+
+ if (auto res = getCache()->lookup(store, immutableAttrs)) {
+ return {
+ Tree{
+ .actualPath = store->toRealPath(res->second),
+ .storePath = std::move(res->second),
+ .info = TreeInfo {
+ .lastModified = getIntAttr(res->first, "lastModified"),
+ },
+ },
+ input
+ };
+ }
+
+ // FIXME: use regular /archive URLs instead? api.github.com
+ // might have stricter rate limits.
+
+ auto url = fmt("https://api.github.com/repos/%s/%s/tarball/%s",
+ owner, repo, rev->to_string(Base16, false));
+
+ std::string accessToken = settings.githubAccessToken.get();
+ if (accessToken != "")
+ url += "?access_token=" + accessToken;
+
+ auto tree = downloadTarball(store, url, "source", true);
+
+ getCache()->add(
+ store,
+ immutableAttrs,
+ {
+ {"rev", rev->gitRev()},
+ {"lastModified", *tree.info.lastModified}
+ },
+ tree.storePath,
+ true);
+
+ return {std::move(tree), input};
+ }
+};
+
+struct GitHubInputScheme : InputScheme
+{
+ std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
+ {
+ if (url.scheme != "github") return nullptr;
+
+ auto path = tokenizeString<std::vector<std::string>>(url.path, "/");
+ auto input = std::make_unique<GitHubInput>();
+
+ if (path.size() == 2) {
+ } else if (path.size() == 3) {
+ if (std::regex_match(path[2], revRegex))
+ input->rev = Hash(path[2], htSHA1);
+ else if (std::regex_match(path[2], refRegex))
+ input->ref = path[2];
+ else
+ throw BadURL("in GitHub URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[2]);
+ } else
+ throw BadURL("GitHub URL '%s' is invalid", url.url);
+
+ for (auto &[name, value] : url.query) {
+ if (name == "rev") {
+ if (input->rev)
+ throw BadURL("GitHub URL '%s' contains multiple commit hashes", url.url);
+ input->rev = Hash(value, htSHA1);
+ }
+ else if (name == "ref") {
+ if (!std::regex_match(value, refRegex))
+ throw BadURL("GitHub URL '%s' contains an invalid branch/tag name", url.url);
+ if (input->ref)
+ throw BadURL("GitHub URL '%s' contains multiple branch/tag names", url.url);
+ input->ref = value;
+ }
+ }
+
+ if (input->ref && input->rev)
+ throw BadURL("GitHub URL '%s' contains both a commit hash and a branch/tag name", url.url);
+
+ input->owner = path[0];
+ input->repo = path[1];
+
+ return input;
+ }
+
+ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
+ {
+ if (maybeGetStrAttr(attrs, "type") != "github") return {};
+
+ for (auto & [name, value] : attrs)
+ if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev")
+ throw Error("unsupported GitHub input attribute '%s'", name);
+
+ auto input = std::make_unique<GitHubInput>();
+ input->owner = getStrAttr(attrs, "owner");
+ input->repo = getStrAttr(attrs, "repo");
+ input->ref = maybeGetStrAttr(attrs, "ref");
+ if (auto rev = maybeGetStrAttr(attrs, "rev"))
+ input->rev = Hash(*rev, htSHA1);
+ return input;
+ }
+};
+
+static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitHubInputScheme>()); });
+
+}
diff --git a/src/libfetchers/local.mk b/src/libfetchers/local.mk
new file mode 100644
index 000000000..d7143d8a6
--- /dev/null
+++ b/src/libfetchers/local.mk
@@ -0,0 +1,11 @@
+libraries += libfetchers
+
+libfetchers_NAME = libnixfetchers
+
+libfetchers_DIR := $(d)
+
+libfetchers_SOURCES := $(wildcard $(d)/*.cc)
+
+libfetchers_CXXFLAGS += -I src/libutil -I src/libstore
+
+libfetchers_LIBS = libutil libstore libnixrust
diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc
new file mode 100644
index 000000000..1d6571571
--- /dev/null
+++ b/src/libfetchers/mercurial.cc
@@ -0,0 +1,303 @@
+#include "fetchers.hh"
+#include "cache.hh"
+#include "globals.hh"
+#include "tarfile.hh"
+#include "store-api.hh"
+
+#include <sys/time.h>
+
+using namespace std::string_literals;
+
+namespace nix::fetchers {
+
+struct MercurialInput : Input
+{
+ ParsedURL url;
+ std::optional<std::string> ref;
+ std::optional<Hash> rev;
+
+ MercurialInput(const ParsedURL & url) : url(url)
+ { }
+
+ std::string type() const override { return "hg"; }
+
+ bool operator ==(const Input & other) const override
+ {
+ auto other2 = dynamic_cast<const MercurialInput *>(&other);
+ return
+ other2
+ && url == other2->url
+ && rev == other2->rev
+ && ref == other2->ref;
+ }
+
+ bool isImmutable() const override
+ {
+ return (bool) rev || narHash;
+ }
+
+ std::optional<std::string> getRef() const override { return ref; }
+
+ std::optional<Hash> getRev() const override { return rev; }
+
+ ParsedURL toURL() const override
+ {
+ ParsedURL url2(url);
+ url2.scheme = "hg+" + url2.scheme;
+ if (rev) url2.query.insert_or_assign("rev", rev->gitRev());
+ if (ref) url2.query.insert_or_assign("ref", *ref);
+ return url2;
+ }
+
+ Attrs toAttrsInternal() const override
+ {
+ Attrs attrs;
+ attrs.emplace("url", url.to_string());
+ if (ref)
+ attrs.emplace("ref", *ref);
+ if (rev)
+ attrs.emplace("rev", rev->gitRev());
+ return attrs;
+ }
+
+ std::pair<bool, std::string> getActualUrl() const
+ {
+ bool isLocal = url.scheme == "file";
+ return {isLocal, isLocal ? url.path : url.base};
+ }
+
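+ /* Fetch a Mercurial repository: a dirty local checkout is copied file by
+ file; otherwise the repository is cloned or pulled into a cache repo
+ under <cache-dir>/nix/hg, the requested rev or ref is resolved, and the
+ revision is exported with 'hg archive' and added to the store. */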
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+ {
+ auto name = "source";
+
+ auto input = std::make_shared<MercurialInput>(*this);
+
+ auto [isLocal, actualUrl_] = getActualUrl();
+ auto actualUrl = actualUrl_; // work around clang bug
+
+ // FIXME: return lastModified.
+
+ // FIXME: don't clone local repositories.
+
+ if (!input->ref && !input->rev && isLocal && pathExists(actualUrl + "/.hg")) {
+
+ bool clean = runProgram("hg", true, { "status", "-R", actualUrl, "--modified", "--added", "--removed" }) == "";
+
+ if (!clean) {
+
+ /* This is an unclean working tree. So copy all tracked
+ files. */
+
+ if (!settings.allowDirty)
+ throw Error("Mercurial tree '%s' is unclean", actualUrl);
+
+ if (settings.warnDirty)
+ warn("Mercurial tree '%s' is unclean", actualUrl);
+
+ input->ref = chomp(runProgram("hg", true, { "branch", "-R", actualUrl }));
+
+ auto files = tokenizeString<std::set<std::string>>(
+ runProgram("hg", true, { "status", "-R", actualUrl, "--clean", "--modified", "--added", "--no-status", "--print0" }), "\0"s);
+
+ PathFilter filter = [&](const Path & p) -> bool {
+ assert(hasPrefix(p, actualUrl));
+ std::string file(p, actualUrl.size() + 1);
+
+ auto st = lstat(p);
+
+ if (S_ISDIR(st.st_mode)) {
+ auto prefix = file + "/";
+ auto i = files.lower_bound(prefix);
+ return i != files.end() && hasPrefix(*i, prefix);
+ }
+
+ return files.count(file);
+ };
+
+ auto storePath = store->addToStore("source", actualUrl, true, htSHA256, filter);
+
+ return {Tree {
+ .actualPath = store->printStorePath(storePath),
+ .storePath = std::move(storePath),
+ }, input};
+ }
+ }
+
+ if (!input->ref) input->ref = "default";
+
+ auto getImmutableAttrs = [&]()
+ {
+ return Attrs({
+ {"type", "hg"},
+ {"name", name},
+ {"rev", input->rev->gitRev()},
+ });
+ };
+
+ auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
+ -> std::pair<Tree, std::shared_ptr<const Input>>
+ {
+ assert(input->rev);
+ assert(!rev || rev == input->rev);
+ return {
+ Tree{
+ .actualPath = store->toRealPath(storePath),
+ .storePath = std::move(storePath),
+ .info = TreeInfo {
+ .revCount = getIntAttr(infoAttrs, "revCount"),
+ },
+ },
+ input
+ };
+ };
+
+ if (input->rev) {
+ if (auto res = getCache()->lookup(store, getImmutableAttrs()))
+ return makeResult(res->first, std::move(res->second));
+ }
+
+ assert(input->rev || input->ref);
+ auto revOrRef = input->rev ? input->rev->gitRev() : *input->ref;
+
+ Attrs mutableAttrs({
+ {"type", "hg"},
+ {"name", name},
+ {"url", actualUrl},
+ {"ref", *input->ref},
+ });
+
+ if (auto res = getCache()->lookup(store, mutableAttrs)) {
+ auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1);
+ if (!rev || rev == rev2) {
+ input->rev = rev2;
+ return makeResult(res->first, std::move(res->second));
+ }
+ }
+
+ Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(Base32, false));
+
+ /* If this is a commit hash that we already have, we don't
+ have to pull again. */
+ if (!(input->rev
+ && pathExists(cacheDir)
+ && runProgram(
+ RunOptions("hg", { "log", "-R", cacheDir, "-r", input->rev->gitRev(), "--template", "1" })
+ .killStderr(true)).second == "1"))
+ {
+ Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", actualUrl));
+
+ if (pathExists(cacheDir)) {
+ try {
+ runProgram("hg", true, { "pull", "-R", cacheDir, "--", actualUrl });
+ }
+ catch (ExecError & e) {
+ string transJournal = cacheDir + "/.hg/store/journal";
+ /* hg throws "abandoned transaction" error only if this file exists */
+ if (pathExists(transJournal)) {
+ runProgram("hg", true, { "recover", "-R", cacheDir });
+ runProgram("hg", true, { "pull", "-R", cacheDir, "--", actualUrl });
+ } else {
+ throw ExecError(e.status, fmt("'hg pull' %s", statusToString(e.status)));
+ }
+ }
+ } else {
+ createDirs(dirOf(cacheDir));
+ runProgram("hg", true, { "clone", "--noupdate", "--", actualUrl, cacheDir });
+ }
+ }
+
+ auto tokens = tokenizeString<std::vector<std::string>>(
+ runProgram("hg", true, { "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
+ assert(tokens.size() == 3);
+
+ input->rev = Hash(tokens[0], htSHA1);
+ auto revCount = std::stoull(tokens[1]);
+ input->ref = tokens[2];
+
+ if (auto res = getCache()->lookup(store, getImmutableAttrs()))
+ return makeResult(res->first, std::move(res->second));
+
+ Path tmpDir = createTempDir();
+ AutoDelete delTmpDir(tmpDir, true);
+
+ runProgram("hg", true, { "archive", "-R", cacheDir, "-r", input->rev->gitRev(), tmpDir });
+
+ deletePath(tmpDir + "/.hg_archival.txt");
+
+ auto storePath = store->addToStore(name, tmpDir);
+
+ Attrs infoAttrs({
+ {"rev", input->rev->gitRev()},
+ {"revCount", (int64_t) revCount},
+ });
+
+ if (!this->rev)
+ getCache()->add(
+ store,
+ mutableAttrs,
+ infoAttrs,
+ storePath,
+ false);
+
+ getCache()->add(
+ store,
+ getImmutableAttrs(),
+ infoAttrs,
+ storePath,
+ true);
+
+ return makeResult(infoAttrs, std::move(storePath));
+ }
+};
+
+struct MercurialInputScheme : InputScheme
+{
+ std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
+ {
+ if (url.scheme != "hg+http" &&
+ url.scheme != "hg+https" &&
+ url.scheme != "hg+ssh" &&
+ url.scheme != "hg+file") return nullptr;
+
+ auto url2(url);
+ url2.scheme = std::string(url2.scheme, 3);
+ url2.query.clear();
+
+ Attrs attrs;
+ attrs.emplace("type", "hg");
+
+ for (auto &[name, value] : url.query) {
+ if (name == "rev" || name == "ref")
+ attrs.emplace(name, value);
+ else
+ url2.query.emplace(name, value);
+ }
+
+ attrs.emplace("url", url2.to_string());
+
+ return inputFromAttrs(attrs);
+ }
+
+ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
+ {
+ if (maybeGetStrAttr(attrs, "type") != "hg") return {};
+
+ for (auto & [name, value] : attrs)
+ if (name != "type" && name != "url" && name != "ref" && name != "rev")
+ throw Error("unsupported Mercurial input attribute '%s'", name);
+
+ auto input = std::make_unique<MercurialInput>(parseURL(getStrAttr(attrs, "url")));
+ if (auto ref = maybeGetStrAttr(attrs, "ref")) {
+ if (!std::regex_match(*ref, refRegex))
+ throw BadURL("invalid Mercurial branch/tag name '%s'", *ref);
+ input->ref = *ref;
+ }
+ if (auto rev = maybeGetStrAttr(attrs, "rev"))
+ input->rev = Hash(*rev, htSHA1);
+ return input;
+ }
+};
+
+static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<MercurialInputScheme>()); });
+
+}
diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc
new file mode 100644
index 000000000..ba2cc192e
--- /dev/null
+++ b/src/libfetchers/path.cc
@@ -0,0 +1,148 @@
+#include "fetchers.hh"
+#include "store-api.hh"
+
+namespace nix::fetchers {
+
+struct PathInput : Input
+{
+ Path path;
+
+ /* Allow the user to pass in "fake" tree info attributes. This is
+ useful for making a pinned tree work the same as the repository
+ from which it is exported
+ (e.g. path:/nix/store/...-source?lastModified=1585388205&rev=b0c285...). */
+ std::optional<Hash> rev;
+ std::optional<uint64_t> revCount;
+ std::optional<time_t> lastModified;
+
+ std::string type() const override { return "path"; }
+
+ std::optional<Hash> getRev() const override { return rev; }
+
+ bool operator ==(const Input & other) const override
+ {
+ auto other2 = dynamic_cast<const PathInput *>(&other);
+ return
+ other2
+ && path == other2->path
+ && rev == other2->rev
+ && revCount == other2->revCount
+ && lastModified == other2->lastModified;
+ }
+
+ bool isImmutable() const override
+ {
+ return (bool) narHash;
+ }
+
+ ParsedURL toURL() const override
+ {
+ auto query = attrsToQuery(toAttrsInternal());
+ query.erase("path");
+ return ParsedURL {
+ .scheme = "path",
+ .path = path,
+ .query = query,
+ };
+ }
+
+ Attrs toAttrsInternal() const override
+ {
+ Attrs attrs;
+ attrs.emplace("path", path);
+ if (rev)
+ attrs.emplace("rev", rev->gitRev());
+ if (revCount)
+ attrs.emplace("revCount", *revCount);
+ if (lastModified)
+ attrs.emplace("lastModified", *lastModified);
+ return attrs;
+ }
+
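+ /* Copy the path into the store (named "source"), unless it already is a
+ valid store path with that name, and return it together with any
+ user-supplied tree info (rev, revCount, lastModified). */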
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+ {
+ auto input = std::make_shared<PathInput>(*this);
+
+ // FIXME: check whether access to 'path' is allowed.
+
+ auto storePath = store->maybeParseStorePath(path);
+
+ if (storePath)
+ store->addTempRoot(*storePath);
+
+ if (!storePath || storePath->name() != "source" || !store->isValidPath(*storePath))
+ // FIXME: try to substitute storePath.
+ storePath = store->addToStore("source", path);
+
+ return
+ {
+ Tree {
+ .actualPath = store->toRealPath(*storePath),
+ .storePath = std::move(*storePath),
+ .info = TreeInfo {
+ .revCount = revCount,
+ .lastModified = lastModified
+ }
+ },
+ input
+ };
+ }
+
+};
+
+struct PathInputScheme : InputScheme
+{
+ std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
+ {
+ if (url.scheme != "path") return nullptr;
+
+ auto input = std::make_unique<PathInput>();
+ input->path = url.path;
+
+ for (auto & [name, value] : url.query)
+ if (name == "rev")
+ input->rev = Hash(value, htSHA1);
+ else if (name == "revCount") {
+ uint64_t revCount;
+ if (!string2Int(value, revCount))
+ throw Error("path URL '%s' has invalid parameter '%s'", url.to_string(), name);
+ input->revCount = revCount;
+ }
+ else if (name == "lastModified") {
+ time_t lastModified;
+ if (!string2Int(value, lastModified))
+ throw Error("path URL '%s' has invalid parameter '%s'", url.to_string(), name);
+ input->lastModified = lastModified;
+ }
+ else
+ throw Error("path URL '%s' has unsupported parameter '%s'", url.to_string(), name);
+
+ return input;
+ }
+
+ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
+ {
+ if (maybeGetStrAttr(attrs, "type") != "path") return {};
+
+ auto input = std::make_unique<PathInput>();
+ input->path = getStrAttr(attrs, "path");
+
+ for (auto & [name, value] : attrs)
+ if (name == "rev")
+ input->rev = Hash(getStrAttr(attrs, "rev"), htSHA1);
+ else if (name == "revCount")
+ input->revCount = getIntAttr(attrs, "revCount");
+ else if (name == "lastModified")
+ input->lastModified = getIntAttr(attrs, "lastModified");
+ else if (name == "type" || name == "path")
+ ;
+ else
+ throw Error("unsupported path input attribute '%s'", name);
+
+ return input;
+ }
+};
+
+static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<PathInputScheme>()); });
+
+}
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
new file mode 100644
index 000000000..695525b31
--- /dev/null
+++ b/src/libfetchers/tarball.cc
@@ -0,0 +1,275 @@
+#include "fetchers.hh"
+#include "cache.hh"
+#include "filetransfer.hh"
+#include "globals.hh"
+#include "store-api.hh"
+#include "archive.hh"
+#include "tarfile.hh"
+
+namespace nix::fetchers {
+
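+/* Download a file and add it to the store, caching the result keyed on the
+ URL. A cached entry is revalidated via its ETag; if the transfer fails and
+ a cached copy exists, the cached copy is used instead. */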
+DownloadFileResult downloadFile(
+ ref<Store> store,
+ const std::string & url,
+ const std::string & name,
+ bool immutable)
+{
+ // FIXME: check store
+
+ Attrs inAttrs({
+ {"type", "file"},
+ {"url", url},
+ {"name", name},
+ });
+
+ auto cached = getCache()->lookupExpired(store, inAttrs);
+
+ auto useCached = [&]() -> DownloadFileResult
+ {
+ return {
+ .storePath = std::move(cached->storePath),
+ .etag = getStrAttr(cached->infoAttrs, "etag"),
+ .effectiveUrl = getStrAttr(cached->infoAttrs, "url")
+ };
+ };
+
+ if (cached && !cached->expired)
+ return useCached();
+
+ FileTransferRequest request(url);
+ if (cached)
+ request.expectedETag = getStrAttr(cached->infoAttrs, "etag");
+ FileTransferResult res;
+ try {
+ res = getFileTransfer()->download(request);
+ } catch (FileTransferError & e) {
+ if (cached) {
+ warn("%s; using cached version", e.msg());
+ return useCached();
+ } else
+ throw;
+ }
+
+ // FIXME: write to temporary file.
+
+ Attrs infoAttrs({
+ {"etag", res.etag},
+ {"url", res.effectiveUri},
+ });
+
+ std::optional<StorePath> storePath;
+
+ if (res.cached) {
+ assert(cached);
+ assert(request.expectedETag == res.etag);
+ storePath = std::move(cached->storePath);
+ } else {
+ StringSink sink;
+ dumpString(*res.data, sink);
+ auto hash = hashString(htSHA256, *res.data);
+ ValidPathInfo info(store->makeFixedOutputPath(false, hash, name));
+ info.narHash = hashString(htSHA256, *sink.s);
+ info.narSize = sink.s->size();
+ info.ca = makeFixedOutputCA(false, hash);
+ store->addToStore(info, sink.s, NoRepair, NoCheckSigs);
+ storePath = std::move(info.path);
+ }
+
+ getCache()->add(
+ store,
+ inAttrs,
+ infoAttrs,
+ *storePath,
+ immutable);
+
+ if (url != res.effectiveUri)
+ getCache()->add(
+ store,
+ {
+ {"type", "file"},
+ {"url", res.effectiveUri},
+ {"name", name},
+ },
+ infoAttrs,
+ *storePath,
+ immutable);
+
+ return {
+ .storePath = std::move(*storePath),
+ .etag = res.etag,
+ .effectiveUrl = res.effectiveUri,
+ };
+}
+
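+/* Download and unpack a tarball. The download itself goes through
+ downloadFile(); the unpacked tree (the single top-level directory of the
+ tarball) is cached keyed on the ETag. */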
+Tree downloadTarball(
+ ref<Store> store,
+ const std::string & url,
+ const std::string & name,
+ bool immutable)
+{
+ Attrs inAttrs({
+ {"type", "tarball"},
+ {"url", url},
+ {"name", name},
+ });
+
+ auto cached = getCache()->lookupExpired(store, inAttrs);
+
+ if (cached && !cached->expired)
+ return Tree {
+ .actualPath = store->toRealPath(cached->storePath),
+ .storePath = std::move(cached->storePath),
+ .info = TreeInfo {
+ .lastModified = getIntAttr(cached->infoAttrs, "lastModified"),
+ },
+ };
+
+ auto res = downloadFile(store, url, name, immutable);
+
+ std::optional<StorePath> unpackedStorePath;
+ time_t lastModified;
+
+ if (cached && res.etag != "" && getStrAttr(cached->infoAttrs, "etag") == res.etag) {
+ unpackedStorePath = std::move(cached->storePath);
+ lastModified = getIntAttr(cached->infoAttrs, "lastModified");
+ } else {
+ Path tmpDir = createTempDir();
+ AutoDelete autoDelete(tmpDir, true);
+ unpackTarfile(store->toRealPath(res.storePath), tmpDir);
+ auto members = readDirectory(tmpDir);
+ if (members.size() != 1)
+ throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
+ auto topDir = tmpDir + "/" + members.begin()->name;
+ lastModified = lstat(topDir).st_mtime;
+ unpackedStorePath = store->addToStore(name, topDir, true, htSHA256, defaultPathFilter, NoRepair);
+ }
+
+ Attrs infoAttrs({
+ {"lastModified", lastModified},
+ {"etag", res.etag},
+ });
+
+ getCache()->add(
+ store,
+ inAttrs,
+ infoAttrs,
+ *unpackedStorePath,
+ immutable);
+
+ return Tree {
+ .actualPath = store->toRealPath(*unpackedStorePath),
+ .storePath = std::move(*unpackedStorePath),
+ .info = TreeInfo {
+ .lastModified = lastModified,
+ },
+ };
+}
+
+struct TarballInput : Input
+{
+ ParsedURL url;
+ std::optional<Hash> hash;
+
+ TarballInput(const ParsedURL & url) : url(url)
+ { }
+
+ std::string type() const override { return "tarball"; }
+
+ bool operator ==(const Input & other) const override
+ {
+ auto other2 = dynamic_cast<const TarballInput *>(&other);
+ return
+ other2
+ && to_string() == other2->to_string()
+ && hash == other2->hash;
+ }
+
+ bool isImmutable() const override
+ {
+ return hash || narHash;
+ }
+
+ ParsedURL toURL() const override
+ {
+ auto url2(url);
+ // NAR hashes are preferred over file hashes since tar/zip files
+ // don't have a canonical representation.
+ if (narHash)
+ url2.query.insert_or_assign("narHash", narHash->to_string(SRI));
+ else if (hash)
+ url2.query.insert_or_assign("hash", hash->to_string(SRI));
+ return url2;
+ }
+
+ Attrs toAttrsInternal() const override
+ {
+ Attrs attrs;
+ attrs.emplace("url", url.to_string());
+ if (hash)
+ attrs.emplace("hash", hash->to_string(SRI));
+ return attrs;
+ }
+
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+ {
+ auto tree = downloadTarball(store, url.to_string(), "source", false);
+
+ auto input = std::make_shared<TarballInput>(*this);
+ input->narHash = store->queryPathInfo(tree.storePath)->narHash;
+
+ return {std::move(tree), input};
+ }
+};
+
+struct TarballInputScheme : InputScheme
+{
+ std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
+ {
+ if (url.scheme != "file" && url.scheme != "http" && url.scheme != "https") return nullptr;
+
+ if (!hasSuffix(url.path, ".zip")
+ && !hasSuffix(url.path, ".tar")
+ && !hasSuffix(url.path, ".tar.gz")
+ && !hasSuffix(url.path, ".tar.xz")
+ && !hasSuffix(url.path, ".tar.bz2"))
+ return nullptr;
+
+ auto input = std::make_unique<TarballInput>(url);
+
+ auto hash = input->url.query.find("hash");
+ if (hash != input->url.query.end()) {
+ // FIXME: require SRI hash.
+ input->hash = Hash(hash->second);
+ input->url.query.erase(hash);
+ }
+
+ auto narHash = input->url.query.find("narHash");
+ if (narHash != input->url.query.end()) {
+ // FIXME: require SRI hash.
+ input->narHash = Hash(narHash->second);
+ input->url.query.erase(narHash);
+ }
+
+ return input;
+ }
+
+ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
+ {
+ if (maybeGetStrAttr(attrs, "type") != "tarball") return {};
+
+ for (auto & [name, value] : attrs)
+ if (name != "type" && name != "url" && name != "hash")
+ throw Error("unsupported tarball input attribute '%s'", name);
+
+ auto input = std::make_unique<TarballInput>(parseURL(getStrAttr(attrs, "url")));
+ if (auto hash = maybeGetStrAttr(attrs, "hash"))
+ // FIXME: require SRI hash.
+ input->hash = Hash(*hash);
+
+ return input;
+ }
+};
+
+static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<TarballInputScheme>()); });
+
+}
diff --git a/src/libfetchers/tree-info.cc b/src/libfetchers/tree-info.cc
new file mode 100644
index 000000000..5788e94a1
--- /dev/null
+++ b/src/libfetchers/tree-info.cc
@@ -0,0 +1,14 @@
+#include "tree-info.hh"
+#include "store-api.hh"
+
+#include <nlohmann/json.hpp>
+
+namespace nix::fetchers {
+
+StorePath TreeInfo::computeStorePath(Store & store) const
+{
+ assert(narHash);
+ return store.makeFixedOutputPath(true, narHash, "source");
+}
+
+}
diff --git a/src/libfetchers/tree-info.hh b/src/libfetchers/tree-info.hh
new file mode 100644
index 000000000..2c7347281
--- /dev/null
+++ b/src/libfetchers/tree-info.hh
@@ -0,0 +1,29 @@
+#pragma once
+
+#include "path.hh"
+#include "hash.hh"
+
+#include <nlohmann/json_fwd.hpp>
+
+namespace nix { class Store; }
+
+namespace nix::fetchers {
+
+struct TreeInfo
+{
+ Hash narHash;
+ std::optional<uint64_t> revCount;
+ std::optional<time_t> lastModified;
+
+ bool operator ==(const TreeInfo & other) const
+ {
+ return
+ narHash == other.narHash
+ && revCount == other.revCount
+ && lastModified == other.lastModified;
+ }
+
+ StorePath computeStorePath(Store & store) const;
+};
+
+}
diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc
index 9e1d7cee6..51e199ea5 100644
--- a/src/libmain/common-args.cc
+++ b/src/libmain/common-args.cc
@@ -6,43 +6,47 @@ namespace nix {
MixCommonArgs::MixCommonArgs(const string & programName)
: programName(programName)
{
- mkFlag()
- .longName("verbose")
- .shortName('v')
- .description("increase verbosity level")
- .handler([]() { verbosity = (Verbosity) (verbosity + 1); });
-
- mkFlag()
- .longName("quiet")
- .description("decrease verbosity level")
- .handler([]() { verbosity = verbosity > lvlError ? (Verbosity) (verbosity - 1) : lvlError; });
-
- mkFlag()
- .longName("debug")
- .description("enable debug output")
- .handler([]() { verbosity = lvlDebug; });
-
- mkFlag()
- .longName("option")
- .labels({"name", "value"})
- .description("set a Nix configuration option (overriding nix.conf)")
- .arity(2)
- .handler([](std::vector<std::string> ss) {
+ addFlag({
+ .longName = "verbose",
+ .shortName = 'v',
+ .description = "increase verbosity level",
+ .handler = {[]() { verbosity = (Verbosity) (verbosity + 1); }},
+ });
+
+ addFlag({
+ .longName = "quiet",
+ .description = "decrease verbosity level",
+ .handler = {[]() { verbosity = verbosity > lvlError ? (Verbosity) (verbosity - 1) : lvlError; }},
+ });
+
+ addFlag({
+ .longName = "debug",
+ .description = "enable debug output",
+ .handler = {[]() { verbosity = lvlDebug; }},
+ });
+
+ addFlag({
+ .longName = "option",
+ .description = "set a Nix configuration option (overriding nix.conf)",
+ .labels = {"name", "value"},
+ .handler = {[](std::string name, std::string value) {
try {
- globalConfig.set(ss[0], ss[1]);
+ globalConfig.set(name, value);
} catch (UsageError & e) {
warn(e.what());
}
- });
-
- mkFlag()
- .longName("max-jobs")
- .shortName('j')
- .label("jobs")
- .description("maximum number of parallel builds")
- .handler([=](std::string s) {
+ }},
+ });
+
+ addFlag({
+ .longName = "max-jobs",
+ .shortName = 'j',
+ .description = "maximum number of parallel builds",
+ .labels = Strings{"jobs"},
+ .handler = {[=](std::string s) {
settings.set("max-jobs", s);
- });
+ }}
+ });
std::string cat = "config";
globalConfig.convertToArgs(*this, cat);
diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc
index d41e772e9..70d1f0186 100644
--- a/src/libmain/shared.cc
+++ b/src/libmain/shared.cc
@@ -155,7 +155,7 @@ void initNix()
sshd). This breaks build users because they don't have access
to the TMPDIR, in particular in ‘nix-store --serve’. */
#if __APPLE__
- if (getuid() == 0 && hasPrefix(getEnv("TMPDIR").value_or("/tmp"), "/var/folders/"))
+ if (hasPrefix(getEnv("TMPDIR").value_or("/tmp"), "/var/folders/"))
unsetenv("TMPDIR");
#endif
}
@@ -165,28 +165,32 @@ LegacyArgs::LegacyArgs(const std::string & programName,
std::function<bool(Strings::iterator & arg, const Strings::iterator & end)> parseArg)
: MixCommonArgs(programName), parseArg(parseArg)
{
- mkFlag()
- .longName("no-build-output")
- .shortName('Q')
- .description("do not show build output")
- .set(&settings.verboseBuild, false);
-
- mkFlag()
- .longName("keep-failed")
- .shortName('K')
- .description("keep temporary directories of failed builds")
- .set(&(bool&) settings.keepFailed, true);
-
- mkFlag()
- .longName("keep-going")
- .shortName('k')
- .description("keep going after a build fails")
- .set(&(bool&) settings.keepGoing, true);
-
- mkFlag()
- .longName("fallback")
- .description("build from source if substitution fails")
- .set(&(bool&) settings.tryFallback, true);
+ addFlag({
+ .longName = "no-build-output",
+ .shortName = 'Q',
+ .description = "do not show build output",
+ .handler = {&settings.verboseBuild, false},
+ });
+
+ addFlag({
+ .longName = "keep-failed",
+ .shortName = 'K',
+ .description = "keep temporary directories of failed builds",
+ .handler = {&(bool&) settings.keepFailed, true},
+ });
+
+ addFlag({
+ .longName = "keep-going",
+ .shortName = 'k',
+ .description = "keep going after a build fails",
+ .handler = {&(bool&) settings.keepGoing, true},
+ });
+
+ addFlag({
+ .longName = "fallback",
+ .description = "build from source if substitution fails",
+ .handler = {&(bool&) settings.tryFallback, true},
+ });
auto intSettingAlias = [&](char shortName, const std::string & longName,
const std::string & description, const std::string & dest) {
@@ -205,11 +209,12 @@ LegacyArgs::LegacyArgs(const std::string & programName,
mkFlag(0, "no-gc-warning", "disable warning about not using '--add-root'",
&gcWarning, false);
- mkFlag()
- .longName("store")
- .label("store-uri")
- .description("URI of the Nix store to use")
- .dest(&(std::string&) settings.storeUri);
+ addFlag({
+ .longName = "store",
+ .description = "URI of the Nix store to use",
+ .labels = {"store-uri"},
+ .handler = {&(std::string&) settings.storeUri},
+ });
}
@@ -260,7 +265,10 @@ void printVersion(const string & programName)
cfg.push_back("signed-caches");
#endif
std::cout << "Features: " << concatStringsSep(", ", cfg) << "\n";
- std::cout << "Configuration file: " << settings.nixConfDir + "/nix.conf" << "\n";
+ std::cout << "System configuration file: " << settings.nixConfDir + "/nix.conf" << "\n";
+ std::cout << "User configuration files: " <<
+ concatStringsSep(":", settings.nixUserConfFiles)
+ << "\n";
std::cout << "Store directory: " << settings.nixStore << "\n";
std::cout << "State directory: " << settings.nixStateDir << "\n";
}
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index be52e637e..272c4f1ae 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -7,7 +7,7 @@
#include "affinity.hh"
#include "builtins.hh"
#include "builtins/buildenv.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "finally.hh"
#include "compression.hh"
#include "json.hh"
@@ -33,7 +33,6 @@
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/utsname.h>
-#include <sys/select.h>
#include <sys/resource.h>
#include <sys/socket.h>
#include <sys/un.h>
@@ -43,6 +42,7 @@
#include <errno.h>
#include <cstring>
#include <termios.h>
+#include <poll.h>
#include <pwd.h>
#include <grp.h>
@@ -361,7 +361,7 @@ public:
{
actDerivations.progress(doneBuilds, expectedBuilds + doneBuilds, runningBuilds, failedBuilds);
actSubstitutions.progress(doneSubstitutions, expectedSubstitutions + doneSubstitutions, runningSubstitutions, failedSubstitutions);
- act.setExpected(actDownload, expectedDownloadSize + doneDownloadSize);
+ act.setExpected(actFileTransfer, expectedDownloadSize + doneDownloadSize);
act.setExpected(actCopyPath, expectedNarSize + doneNarSize);
}
};
@@ -507,9 +507,10 @@ private:
Path fnUserLock;
AutoCloseFD fdUserLock;
+ bool isEnabled = false;
string user;
- uid_t uid;
- gid_t gid;
+ uid_t uid = 0;
+ gid_t gid = 0;
std::vector<gid_t> supplementaryGIDs;
public:
@@ -522,7 +523,9 @@ public:
uid_t getGID() { assert(gid); return gid; }
std::vector<gid_t> getSupplementaryGIDs() { return supplementaryGIDs; }
- bool enabled() { return uid != 0; }
+ bool findFreeUser();
+
+ bool enabled() { return isEnabled; }
};
@@ -530,6 +533,11 @@ public:
UserLock::UserLock()
{
assert(settings.buildUsersGroup != "");
+ createDirs(settings.nixStateDir + "/userpool");
+}
+
+bool UserLock::findFreeUser() {
+ if (enabled()) return true;
/* Get the members of the build-users-group. */
struct group * gr = getgrnam(settings.buildUsersGroup.get().c_str());
@@ -559,7 +567,6 @@ UserLock::UserLock()
throw Error(format("the user '%1%' in the group '%2%' does not exist")
% i % settings.buildUsersGroup);
- createDirs(settings.nixStateDir + "/userpool");
fnUserLock = (format("%1%/userpool/%2%") % settings.nixStateDir % pw->pw_uid).str();
@@ -590,16 +597,13 @@ UserLock::UserLock()
supplementaryGIDs.resize(ngroups);
#endif
- return;
+ isEnabled = true;
+ return true;
}
}
-
- throw Error(format("all build users are currently in use; "
- "consider creating additional users and adding them to the '%1%' group")
- % settings.buildUsersGroup);
+ return false;
}
-
void UserLock::kill()
{
killUser(uid);
@@ -928,6 +932,7 @@ private:
void closureRepaired();
void inputsRealised();
void tryToBuild();
+ void tryLocalBuild();
void buildDone();
/* Is the build hook willing to perform the build? */
@@ -999,6 +1004,8 @@ private:
Goal::amDone(result);
}
+ void started();
+
void done(BuildResult::Status status, const string & msg = "");
StorePathSet exportReferences(const StorePathSet & storePaths);
@@ -1386,6 +1393,19 @@ void DerivationGoal::inputsRealised()
result = BuildResult();
}
+void DerivationGoal::started() {
+ auto msg = fmt(
+ buildMode == bmRepair ? "repairing outputs of '%s'" :
+ buildMode == bmCheck ? "checking outputs of '%s'" :
+ nrRounds > 1 ? "building '%s' (round %d/%d)" :
+ "building '%s'", worker.store.printStorePath(drvPath), curRound, nrRounds);
+ fmt("building '%s'", worker.store.printStorePath(drvPath));
+ if (hook) msg += fmt(" on '%s'", machineName);
+ act = std::make_unique<Activity>(*logger, lvlInfo, actBuild, msg,
+ Logger::Fields{worker.store.printStorePath(drvPath), hook ? machineName : "", curRound, nrRounds});
+ mcRunningBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.runningBuilds);
+ worker.updateProgress();
+}
void DerivationGoal::tryToBuild()
{
@@ -1437,20 +1457,6 @@ void DerivationGoal::tryToBuild()
supported for local builds. */
bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally();
- auto started = [&]() {
- auto msg = fmt(
- buildMode == bmRepair ? "repairing outputs of '%s'" :
- buildMode == bmCheck ? "checking outputs of '%s'" :
- nrRounds > 1 ? "building '%s' (round %d/%d)" :
- "building '%s'", worker.store.printStorePath(drvPath), curRound, nrRounds);
- fmt("building '%s'", worker.store.printStorePath(drvPath));
- if (hook) msg += fmt(" on '%s'", machineName);
- act = std::make_unique<Activity>(*logger, lvlInfo, actBuild, msg,
- Logger::Fields{worker.store.printStorePath(drvPath), hook ? machineName : "", curRound, nrRounds});
- mcRunningBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.runningBuilds);
- worker.updateProgress();
- };
-
/* Is the build hook willing to accept this job? */
if (!buildLocally) {
switch (tryBuildHook()) {
@@ -1483,6 +1489,34 @@ void DerivationGoal::tryToBuild()
return;
}
+ state = &DerivationGoal::tryLocalBuild;
+ worker.wakeUp(shared_from_this());
+}
+
+void DerivationGoal::tryLocalBuild() {
+
+ /* If `build-users-group' is not empty, then we have to build as
+ one of the members of that group. */
+ if (settings.buildUsersGroup != "" && getuid() == 0) {
+#if defined(__linux__) || defined(__APPLE__)
+ if (!buildUser) buildUser = std::make_unique<UserLock>();
+
+ if (buildUser->findFreeUser()) {
+ /* Make sure that no other processes are executing under this
+ uid. */
+ buildUser->kill();
+ } else {
+ debug("waiting for build users");
+ worker.waitForAWhile(shared_from_this());
+ return;
+ }
+#else
+ /* Don't know how to block the creation of setuid/setgid
+ binaries on this platform. */
+ throw Error("build users are not supported on this platform for security reasons");
+#endif
+ }
+
try {
/* Okay, we have to build. */
@@ -1680,6 +1714,7 @@ void DerivationGoal::buildDone()
}
if (buildMode == bmCheck) {
+ deleteTmpDir(true);
done(BuildResult::Built);
return;
}
@@ -1942,22 +1977,6 @@ void DerivationGoal::startBuilder()
#endif
}
- /* If `build-users-group' is not empty, then we have to build as
- one of the members of that group. */
- if (settings.buildUsersGroup != "" && getuid() == 0) {
-#if defined(__linux__) || defined(__APPLE__)
- buildUser = std::make_unique<UserLock>();
-
- /* Make sure that no other processes are executing under this
- uid. */
- buildUser->kill();
-#else
- /* Don't know how to block the creation of setuid/setgid
- binaries on this platform. */
- throw Error("build users are not supported on this platform for security reasons");
-#endif
- }
-
/* Create a temporary directory where the build will take
place. */
tmpDir = createTempDir("", "nix-build-" + std::string(drvPath.name()), false, false, 0700);
@@ -2161,7 +2180,7 @@ void DerivationGoal::startBuilder()
if (needsHashRewrite()) {
if (pathExists(homeDir))
- throw Error(format("directory '%1%' exists; please remove it") % homeDir);
+ throw Error(format("home directory '%1%' exists; please remove it to assure purity of builds without sandboxing") % homeDir);
/* We're not doing a chroot build, but we have some valid
output paths. Since we can't just overwrite or delete
@@ -2249,10 +2268,13 @@ void DerivationGoal::startBuilder()
if (chown(slaveName.c_str(), buildUser->getUID(), 0))
throw SysError("changing owner of pseudoterminal slave");
- } else {
+ }
+#if __APPLE__
+ else {
if (grantpt(builderOut.readSide.get()))
throw SysError("granting access to pseudoterminal slave");
}
+#endif
#if 0
// Mount the pt in the sandbox so that the "tty" command works.
@@ -2465,7 +2487,7 @@ void DerivationGoal::initTmpDir() {
auto hash = hashString(htSHA256, i.first);
string fn = ".attr-" + hash.to_string(Base32, false);
Path p = tmpDir + "/" + fn;
- writeFile(p, i.second);
+ writeFile(p, rewriteStrings(i.second, inputRewrites));
chownToBuilder(p);
env[i.first + "Path"] = tmpDirInSandbox + "/" + fn;
}
@@ -3151,7 +3173,7 @@ void DerivationGoal::runChild()
// Only use nss functions to resolve hosts and
// services. Don’t use it for anything else that may
// be configured for this system. This limits the
- // potential impurities introduced in fixed outputs.
+ // potential impurities introduced in fixed-outputs.
writeFile(chrootRootDir + "/etc/nsswitch.conf", "hosts: files dns\nservices: files\n");
ss.push_back("/etc/services");
@@ -3536,6 +3558,29 @@ StorePathSet parseReferenceSpecifiers(Store & store, const BasicDerivation & drv
}
+static void moveCheckToStore(const Path & src, const Path & dst)
+{
+ /* For the rename of directory to succeed, we must be running as root or
+ the directory must be made temporarily writable (to update the
+ directory's parent link ".."). */
+ struct stat st;
+ if (lstat(src.c_str(), &st) == -1) {
+ throw SysError(format("getting attributes of path '%1%'") % src);
+ }
+
+ bool changePerm = (geteuid() && S_ISDIR(st.st_mode) && !(st.st_mode & S_IWUSR));
+
+ if (changePerm)
+ chmod_(src, st.st_mode | S_IWUSR);
+
+ if (rename(src.c_str(), dst.c_str()))
+ throw SysError(format("renaming '%1%' to '%2%'") % src % dst);
+
+ if (changePerm)
+ chmod_(dst, st.st_mode);
+}
+
+
void DerivationGoal::registerOutputs()
{
/* When using a build hook, the build hook can register the output
@@ -3654,7 +3699,8 @@ void DerivationGoal::registerOutputs()
/* The output path should be a regular file without execute permission. */
if (!S_ISREG(st.st_mode) || (st.st_mode & S_IXUSR) != 0)
throw BuildError(
- format("output path '%1%' should be a non-executable regular file") % path);
+ format("output path '%1%' should be a non-executable regular file "
+ "since recursive hashing is not enabled (outputHashMode=flat)") % path);
}
/* Check the hash. In hash mode, move the path produced by
@@ -3716,8 +3762,7 @@ void DerivationGoal::registerOutputs()
if (settings.runDiffHook || settings.keepFailed) {
Path dst = worker.store.toRealPath(path + checkSuffix);
deletePath(dst);
- if (rename(actualPath.c_str(), dst.c_str()))
- throw SysError(format("renaming '%1%' to '%2%'") % actualPath % dst);
+ moveCheckToStore(actualPath, dst);
handleDiffHook(
buildUser ? buildUser->getUID() : getuid(),
@@ -3725,10 +3770,10 @@ void DerivationGoal::registerOutputs()
path, dst, worker.store.printStorePath(drvPath), tmpDir);
throw NotDeterministic("derivation '%s' may not be deterministic: output '%s' differs from '%s'",
- worker.store.printStorePath(drvPath), path, dst);
+ worker.store.printStorePath(drvPath), worker.store.toRealPath(path), dst);
} else
throw NotDeterministic("derivation '%s' may not be deterministic: output '%s' differs",
- worker.store.printStorePath(drvPath), path);
+ worker.store.printStorePath(drvPath), worker.store.toRealPath(path));
}
/* Since we verified the build, it's now ultimately trusted. */
@@ -4770,8 +4815,7 @@ void Worker::waitForInput()
terminated. */
bool useTimeout = false;
- struct timeval timeout;
- timeout.tv_usec = 0;
+ long timeout = 0;
auto before = steady_time_point::clock::now();
/* If we're monitoring for silence on stdout/stderr, or if there
@@ -4789,7 +4833,7 @@ void Worker::waitForInput()
nearest = std::min(nearest, i.timeStarted + std::chrono::seconds(settings.buildTimeout));
}
if (nearest != steady_time_point::max()) {
- timeout.tv_sec = std::max(1L, (long) std::chrono::duration_cast<std::chrono::seconds>(nearest - before).count());
+ timeout = std::max(1L, (long) std::chrono::duration_cast<std::chrono::seconds>(nearest - before).count());
useTimeout = true;
}
@@ -4798,32 +4842,30 @@ void Worker::waitForInput()
if (!waitingForAWhile.empty()) {
useTimeout = true;
if (lastWokenUp == steady_time_point::min())
- printError("waiting for locks or build slots...");
+ printError("waiting for locks, build slots or build users...");
if (lastWokenUp == steady_time_point::min() || lastWokenUp > before) lastWokenUp = before;
- timeout.tv_sec = std::max(1L,
+ timeout = std::max(1L,
(long) std::chrono::duration_cast<std::chrono::seconds>(
lastWokenUp + std::chrono::seconds(settings.pollInterval) - before).count());
} else lastWokenUp = steady_time_point::min();
if (useTimeout)
- vomit("sleeping %d seconds", timeout.tv_sec);
+ vomit("sleeping %d seconds", timeout);
/* Use select() to wait for the input side of any logger pipe to
become `available'. Note that `available' (i.e., non-blocking)
includes EOF. */
- fd_set fds;
- FD_ZERO(&fds);
- int fdMax = 0;
+ std::vector<struct pollfd> pollStatus;
+ std::map <int, int> fdToPollStatus;
for (auto & i : children) {
for (auto & j : i.fds) {
- if (j >= FD_SETSIZE)
- throw Error("reached FD_SETSIZE limit");
- FD_SET(j, &fds);
- if (j >= fdMax) fdMax = j + 1;
+ pollStatus.push_back((struct pollfd) { .fd = j, .events = POLLIN });
+ fdToPollStatus[j] = pollStatus.size() - 1;
}
}
- if (select(fdMax, &fds, 0, 0, useTimeout ? &timeout : 0) == -1) {
+ if (poll(pollStatus.data(), pollStatus.size(),
+ useTimeout ? timeout * 1000 : -1) == -1) {
if (errno == EINTR) return;
throw SysError("waiting for input");
}
@@ -4844,7 +4886,7 @@ void Worker::waitForInput()
set<int> fds2(j->fds);
std::vector<unsigned char> buffer(4096);
for (auto & k : fds2) {
- if (FD_ISSET(k, &fds)) {
+ if (pollStatus.at(fdToPollStatus.at(k)).revents) {
ssize_t rd = read(k, buffer.data(), buffer.size());
// FIXME: is there a cleaner way to handle pt close
// than EIO? Is this even standard?
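The waitForInput() hunks above replace select() and its FD_SETSIZE-bounded fd_set with poll() over a growable vector of pollfd, and switch the timeout from a struct timeval to plain seconds converted to milliseconds at the call site. A condensed, self-contained sketch of that pattern (the real loop goes on to read from each ready descriptor):

    #include <poll.h>
    #include <map>
    #include <vector>

    // Wait until any of 'fds' is readable (or at EOF), with an optional timeout
    // in seconds; a negative timeout blocks indefinitely. Returns the ready fds.
    std::vector<int> waitReadable(const std::vector<int> & fds, long timeoutSecs)
    {
        std::vector<struct pollfd> pollStatus;
        std::map<int, size_t> fdToPollStatus;
        for (int fd : fds) {
            struct pollfd p;
            p.fd = fd;
            p.events = POLLIN;
            p.revents = 0;
            fdToPollStatus[fd] = pollStatus.size();
            pollStatus.push_back(p);
        }

        // poll() takes milliseconds; -1 means wait forever.
        if (poll(pollStatus.data(), pollStatus.size(),
                timeoutSecs >= 0 ? timeoutSecs * 1000 : -1) == -1)
            return {}; // caller should check errno (e.g. EINTR)

        std::vector<int> ready;
        for (int fd : fds)
            if (pollStatus.at(fdToPollStatus.at(fd)).revents)
                ready.push_back(fd);
        return ready;
    }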
diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc
index f6ae5d2e6..486babf14 100644
--- a/src/libstore/builtins/fetchurl.cc
+++ b/src/libstore/builtins/fetchurl.cc
@@ -1,5 +1,5 @@
#include "builtins.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "store-api.hh"
#include "archive.hh"
#include "compression.hh"
@@ -26,9 +26,9 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
auto mainUrl = getAttr("url");
bool unpack = get(drv.env, "unpack").value_or("") == "1";
- /* Note: have to use a fresh downloader here because we're in
+ /* Note: have to use a fresh fileTransfer here because we're in
a forked process. */
- auto downloader = makeDownloader();
+ auto fileTransfer = makeFileTransfer();
auto fetch = [&](const std::string & url) {
@@ -36,13 +36,13 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
/* No need to do TLS verification, because we check the hash of
the result anyway. */
- DownloadRequest request(url);
+ FileTransferRequest request(url);
request.verifyTLS = false;
request.decompress = false;
auto decompressor = makeDecompressionSink(
unpack && hasSuffix(mainUrl, ".xz") ? "xz" : "none", sink);
- downloader->download(std::move(request), *decompressor);
+ fileTransfer->download(std::move(request), *decompressor);
decompressor->finish();
});
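builtinFetchurl keeps constructing its own instance because it runs in a forked child, where the parent's shared transfer object and its worker thread must not be reused; the rename only touches the names. In short, the two entry points from filetransfer.hh are used like this:

    // Long-lived processes: the shared instance, which reuses connections
    // and HTTP/2 sessions across requests.
    auto shared = nix::getFileTransfer();

    // Freshly forked children (as in builtinFetchurl above): a private
    // instance, since curl handles and threads do not survive fork().
    auto fresh = nix::makeFileTransfer();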
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 5934c1912..c68e7b16b 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -378,7 +378,7 @@ Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutput
if (h == drvHashes.end()) {
assert(store.isValidPath(i.first));
h = drvHashes.insert_or_assign(i.first.clone(), hashDerivationModulo(store,
- readDerivation(store, store.toRealPath(store.printStorePath(i.first))), false)).first;
+ readDerivation(store, store.toRealPath(i.first)), false)).first;
}
inputs2.insert_or_assign(h->second.to_string(Base16, false), i.second);
}
diff --git a/src/libstore/download.cc b/src/libstore/filetransfer.cc
index 8a9b65899..e9684b3d4 100644
--- a/src/libstore/download.cc
+++ b/src/libstore/filetransfer.cc
@@ -1,14 +1,10 @@
-#include "download.hh"
+#include "filetransfer.hh"
#include "util.hh"
#include "globals.hh"
-#include "hash.hh"
#include "store-api.hh"
-#include "archive.hh"
#include "s3.hh"
#include "compression.hh"
-#include "pathlocks.hh"
#include "finally.hh"
-#include "tarfile.hh"
#ifdef ENABLE_S3
#include <aws/core/client/ClientConfiguration.h>
@@ -31,13 +27,9 @@ using namespace std::string_literals;
namespace nix {
-DownloadSettings downloadSettings;
+FileTransferSettings fileTransferSettings;
-static GlobalConfig::Register r1(&downloadSettings);
-
-CachedDownloadRequest::CachedDownloadRequest(const std::string & uri)
- : uri(uri), ttl(settings.tarballTtl)
-{ }
+static GlobalConfig::Register r1(&fileTransferSettings);
std::string resolveUri(const std::string & uri)
{
@@ -47,21 +39,21 @@ std::string resolveUri(const std::string & uri)
return uri;
}
-struct CurlDownloader : public Downloader
+struct curlFileTransfer : public FileTransfer
{
CURLM * curlm = 0;
std::random_device rd;
std::mt19937 mt19937;
- struct DownloadItem : public std::enable_shared_from_this<DownloadItem>
+ struct TransferItem : public std::enable_shared_from_this<TransferItem>
{
- CurlDownloader & downloader;
- DownloadRequest request;
- DownloadResult result;
+ curlFileTransfer & fileTransfer;
+ FileTransferRequest request;
+ FileTransferResult result;
Activity act;
bool done = false; // whether either the success or failure function has been called
- Callback<DownloadResult> callback;
+ Callback<FileTransferResult> callback;
CURL * req = 0;
bool active = false; // whether the handle has been added to the multi object
std::string status;
@@ -80,19 +72,26 @@ struct CurlDownloader : public Downloader
curl_off_t writtenToSink = 0;
- DownloadItem(CurlDownloader & downloader,
- const DownloadRequest & request,
- Callback<DownloadResult> && callback)
- : downloader(downloader)
+ TransferItem(curlFileTransfer & fileTransfer,
+ const FileTransferRequest & request,
+ Callback<FileTransferResult> && callback)
+ : fileTransfer(fileTransfer)
, request(request)
- , act(*logger, lvlTalkative, actDownload,
+ , act(*logger, lvlTalkative, actFileTransfer,
fmt(request.data ? "uploading '%s'" : "downloading '%s'", request.uri),
{request.uri}, request.parentAct)
, callback(std::move(callback))
, finalSink([this](const unsigned char * data, size_t len) {
if (this->request.dataCallback) {
- writtenToSink += len;
- this->request.dataCallback((char *) data, len);
+ long httpStatus = 0;
+ curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
+
+ /* Only write data to the sink if this is a
+ successful response. */
+ if (httpStatus == 0 || httpStatus == 200 || httpStatus == 201 || httpStatus == 206) {
+ writtenToSink += len;
+ this->request.dataCallback((char *) data, len);
+ }
} else
this->result.data->append((char *) data, len);
})
@@ -103,17 +102,17 @@ struct CurlDownloader : public Downloader
requestHeaders = curl_slist_append(requestHeaders, ("Content-Type: " + request.mimeType).c_str());
}
- ~DownloadItem()
+ ~TransferItem()
{
if (req) {
if (active)
- curl_multi_remove_handle(downloader.curlm, req);
+ curl_multi_remove_handle(fileTransfer.curlm, req);
curl_easy_cleanup(req);
}
if (requestHeaders) curl_slist_free_all(requestHeaders);
try {
if (!done)
- fail(DownloadError(Interrupted, format("download of '%s' was interrupted") % request.uri));
+ fail(FileTransferError(Interrupted, format("download of '%s' was interrupted") % request.uri));
} catch (...) {
ignoreException();
}
@@ -157,7 +156,7 @@ struct CurlDownloader : public Downloader
static size_t writeCallbackWrapper(void * contents, size_t size, size_t nmemb, void * userp)
{
- return ((DownloadItem *) userp)->writeCallback(contents, size, nmemb);
+ return ((TransferItem *) userp)->writeCallback(contents, size, nmemb);
}
size_t headerCallback(void * contents, size_t size, size_t nmemb)
@@ -199,7 +198,7 @@ struct CurlDownloader : public Downloader
static size_t headerCallbackWrapper(void * contents, size_t size, size_t nmemb, void * userp)
{
- return ((DownloadItem *) userp)->headerCallback(contents, size, nmemb);
+ return ((TransferItem *) userp)->headerCallback(contents, size, nmemb);
}
int progressCallback(double dltotal, double dlnow)
@@ -214,7 +213,7 @@ struct CurlDownloader : public Downloader
static int progressCallbackWrapper(void * userp, double dltotal, double dlnow, double ultotal, double ulnow)
{
- return ((DownloadItem *) userp)->progressCallback(dltotal, dlnow);
+ return ((TransferItem *) userp)->progressCallback(dltotal, dlnow);
}
static int debugCallback(CURL * handle, curl_infotype type, char * data, size_t size, void * userptr)
@@ -238,7 +237,7 @@ struct CurlDownloader : public Downloader
static size_t readCallbackWrapper(char *buffer, size_t size, size_t nitems, void * userp)
{
- return ((DownloadItem *) userp)->readCallback(buffer, size, nitems);
+ return ((TransferItem *) userp)->readCallback(buffer, size, nitems);
}
void init()
@@ -249,7 +248,7 @@ struct CurlDownloader : public Downloader
if (verbosity >= lvlVomit) {
curl_easy_setopt(req, CURLOPT_VERBOSE, 1);
- curl_easy_setopt(req, CURLOPT_DEBUGFUNCTION, DownloadItem::debugCallback);
+ curl_easy_setopt(req, CURLOPT_DEBUGFUNCTION, TransferItem::debugCallback);
}
curl_easy_setopt(req, CURLOPT_URL, request.uri.c_str());
@@ -258,19 +257,19 @@ struct CurlDownloader : public Downloader
curl_easy_setopt(req, CURLOPT_NOSIGNAL, 1);
curl_easy_setopt(req, CURLOPT_USERAGENT,
("curl/" LIBCURL_VERSION " Nix/" + nixVersion +
- (downloadSettings.userAgentSuffix != "" ? " " + downloadSettings.userAgentSuffix.get() : "")).c_str());
+ (fileTransferSettings.userAgentSuffix != "" ? " " + fileTransferSettings.userAgentSuffix.get() : "")).c_str());
#if LIBCURL_VERSION_NUM >= 0x072b00
curl_easy_setopt(req, CURLOPT_PIPEWAIT, 1);
#endif
#if LIBCURL_VERSION_NUM >= 0x072f00
- if (downloadSettings.enableHttp2)
+ if (fileTransferSettings.enableHttp2)
curl_easy_setopt(req, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2TLS);
else
curl_easy_setopt(req, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_1);
#endif
- curl_easy_setopt(req, CURLOPT_WRITEFUNCTION, DownloadItem::writeCallbackWrapper);
+ curl_easy_setopt(req, CURLOPT_WRITEFUNCTION, TransferItem::writeCallbackWrapper);
curl_easy_setopt(req, CURLOPT_WRITEDATA, this);
- curl_easy_setopt(req, CURLOPT_HEADERFUNCTION, DownloadItem::headerCallbackWrapper);
+ curl_easy_setopt(req, CURLOPT_HEADERFUNCTION, TransferItem::headerCallbackWrapper);
curl_easy_setopt(req, CURLOPT_HEADERDATA, this);
curl_easy_setopt(req, CURLOPT_PROGRESSFUNCTION, progressCallbackWrapper);
@@ -298,10 +297,10 @@ struct CurlDownloader : public Downloader
curl_easy_setopt(req, CURLOPT_SSL_VERIFYHOST, 0);
}
- curl_easy_setopt(req, CURLOPT_CONNECTTIMEOUT, downloadSettings.connectTimeout.get());
+ curl_easy_setopt(req, CURLOPT_CONNECTTIMEOUT, fileTransferSettings.connectTimeout.get());
curl_easy_setopt(req, CURLOPT_LOW_SPEED_LIMIT, 1L);
- curl_easy_setopt(req, CURLOPT_LOW_SPEED_TIME, downloadSettings.stalledDownloadTimeout.get());
+ curl_easy_setopt(req, CURLOPT_LOW_SPEED_TIME, fileTransferSettings.stalledDownloadTimeout.get());
/* If no file exist in the specified path, curl continues to work
anyway as if netrc support was disabled. */
@@ -390,6 +389,7 @@ struct CurlDownloader : public Downloader
case CURLE_SSL_CACERT_BADFILE:
case CURLE_TOO_MANY_REDIRECTS:
case CURLE_WRITE_ERROR:
+ case CURLE_UNSUPPORTED_PROTOCOL:
err = Misc;
break;
default: // Shut up warnings
@@ -401,14 +401,14 @@ struct CurlDownloader : public Downloader
auto exc =
code == CURLE_ABORTED_BY_CALLBACK && _isInterrupted
- ? DownloadError(Interrupted, fmt("%s of '%s' was interrupted", request.verb(), request.uri))
+ ? FileTransferError(Interrupted, fmt("%s of '%s' was interrupted", request.verb(), request.uri))
: httpStatus != 0
- ? DownloadError(err,
+ ? FileTransferError(err,
fmt("unable to %s '%s': HTTP error %d",
request.verb(), request.uri, httpStatus)
+ (code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code)))
)
- : DownloadError(err,
+ : FileTransferError(err,
fmt("unable to %s '%s': %s (%d)",
request.verb(), request.uri, curl_easy_strerror(code), code));
@@ -422,13 +422,13 @@ struct CurlDownloader : public Downloader
|| writtenToSink == 0
|| (acceptRanges && encoding.empty())))
{
- int ms = request.baseRetryTimeMs * std::pow(2.0f, attempt - 1 + std::uniform_real_distribution<>(0.0, 0.5)(downloader.mt19937));
+ int ms = request.baseRetryTimeMs * std::pow(2.0f, attempt - 1 + std::uniform_real_distribution<>(0.0, 0.5)(fileTransfer.mt19937));
if (writtenToSink)
warn("%s; retrying from offset %d in %d ms", exc.what(), writtenToSink, ms);
else
warn("%s; retrying in %d ms", exc.what(), ms);
embargo = std::chrono::steady_clock::now() + std::chrono::milliseconds(ms);
- downloader.enqueueItem(shared_from_this());
+ fileTransfer.enqueueItem(shared_from_this());
}
else
fail(exc);
@@ -439,12 +439,12 @@ struct CurlDownloader : public Downloader
struct State
{
struct EmbargoComparator {
- bool operator() (const std::shared_ptr<DownloadItem> & i1, const std::shared_ptr<DownloadItem> & i2) {
+ bool operator() (const std::shared_ptr<TransferItem> & i1, const std::shared_ptr<TransferItem> & i2) {
return i1->embargo > i2->embargo;
}
};
bool quit = false;
- std::priority_queue<std::shared_ptr<DownloadItem>, std::vector<std::shared_ptr<DownloadItem>>, EmbargoComparator> incoming;
+ std::priority_queue<std::shared_ptr<TransferItem>, std::vector<std::shared_ptr<TransferItem>>, EmbargoComparator> incoming;
};
Sync<State> state_;
@@ -456,7 +456,7 @@ struct CurlDownloader : public Downloader
std::thread workerThread;
- CurlDownloader()
+ curlFileTransfer()
: mt19937(rd())
{
static std::once_flag globalInit;
@@ -469,7 +469,7 @@ struct CurlDownloader : public Downloader
#endif
#if LIBCURL_VERSION_NUM >= 0x071e00 // Max connections requires >= 7.30.0
curl_multi_setopt(curlm, CURLMOPT_MAX_TOTAL_CONNECTIONS,
- downloadSettings.httpConnections.get());
+ fileTransferSettings.httpConnections.get());
#endif
wakeupPipe.create();
@@ -478,7 +478,7 @@ struct CurlDownloader : public Downloader
workerThread = std::thread([&]() { workerThreadEntry(); });
}
- ~CurlDownloader()
+ ~curlFileTransfer()
{
stopWorkerThread();
@@ -504,7 +504,7 @@ struct CurlDownloader : public Downloader
stopWorkerThread();
});
- std::map<CURL *, std::shared_ptr<DownloadItem>> items;
+ std::map<CURL *, std::shared_ptr<TransferItem>> items;
bool quit = false;
@@ -561,7 +561,7 @@ struct CurlDownloader : public Downloader
throw SysError("reading curl wakeup socket");
}
- std::vector<std::shared_ptr<DownloadItem>> incoming;
+ std::vector<std::shared_ptr<TransferItem>> incoming;
auto now = std::chrono::steady_clock::now();
{
@@ -609,7 +609,7 @@ struct CurlDownloader : public Downloader
}
}
- void enqueueItem(std::shared_ptr<DownloadItem> item)
+ void enqueueItem(std::shared_ptr<TransferItem> item)
{
if (item->request.data
&& !hasPrefix(item->request.uri, "http://")
@@ -641,8 +641,8 @@ struct CurlDownloader : public Downloader
}
#endif
- void enqueueDownload(const DownloadRequest & request,
- Callback<DownloadResult> callback) override
+ void enqueueFileTransfer(const FileTransferRequest & request,
+ Callback<FileTransferResult> callback) override
{
/* Ugly hack to support s3:// URIs. */
if (hasPrefix(request.uri, "s3://")) {
@@ -660,9 +660,9 @@ struct CurlDownloader : public Downloader
// FIXME: implement ETag
auto s3Res = s3Helper.getObject(bucketName, key);
- DownloadResult res;
+ FileTransferResult res;
if (!s3Res.data)
- throw DownloadError(NotFound, fmt("S3 object '%s' does not exist", request.uri));
+ throw FileTransferError(NotFound, fmt("S3 object '%s' does not exist", request.uri));
res.data = s3Res.data;
callback(std::move(res));
#else
@@ -672,26 +672,26 @@ struct CurlDownloader : public Downloader
return;
}
- enqueueItem(std::make_shared<DownloadItem>(*this, request, std::move(callback)));
+ enqueueItem(std::make_shared<TransferItem>(*this, request, std::move(callback)));
}
};
-ref<Downloader> getDownloader()
+ref<FileTransfer> getFileTransfer()
{
- static ref<Downloader> downloader = makeDownloader();
- return downloader;
+ static ref<FileTransfer> fileTransfer = makeFileTransfer();
+ return fileTransfer;
}
-ref<Downloader> makeDownloader()
+ref<FileTransfer> makeFileTransfer()
{
- return make_ref<CurlDownloader>();
+ return make_ref<curlFileTransfer>();
}
-std::future<DownloadResult> Downloader::enqueueDownload(const DownloadRequest & request)
+std::future<FileTransferResult> FileTransfer::enqueueFileTransfer(const FileTransferRequest & request)
{
- auto promise = std::make_shared<std::promise<DownloadResult>>();
- enqueueDownload(request,
- {[promise](std::future<DownloadResult> fut) {
+ auto promise = std::make_shared<std::promise<FileTransferResult>>();
+ enqueueFileTransfer(request,
+ {[promise](std::future<FileTransferResult> fut) {
try {
promise->set_value(fut.get());
} catch (...) {
@@ -701,15 +701,21 @@ std::future<DownloadResult> Downloader::enqueueDownload(const DownloadRequest &
return promise->get_future();
}
-DownloadResult Downloader::download(const DownloadRequest & request)
+FileTransferResult FileTransfer::download(const FileTransferRequest & request)
+{
+ return enqueueFileTransfer(request).get();
+}
+
+FileTransferResult FileTransfer::upload(const FileTransferRequest & request)
{
- return enqueueDownload(request).get();
+ /* Note: this method is the same as download, but helps in readability */
+ return enqueueFileTransfer(request).get();
}
-void Downloader::download(DownloadRequest && request, Sink & sink)
+void FileTransfer::download(FileTransferRequest && request, Sink & sink)
{
/* Note: we can't call 'sink' via request.dataCallback, because
- that would cause the sink to execute on the downloader
+ that would cause the sink to execute on the fileTransfer
thread. If 'sink' is a coroutine, this will fail. Also, if the
sink is expensive (e.g. one that does decompression and writing
to the Nix store), it would stall the download thread too much.
@@ -755,8 +761,8 @@ void Downloader::download(DownloadRequest && request, Sink & sink)
state->avail.notify_one();
};
- enqueueDownload(request,
- {[_state](std::future<DownloadResult> fut) {
+ enqueueFileTransfer(request,
+ {[_state](std::future<FileTransferResult> fut) {
auto state(_state->lock());
state->quit = true;
try {
@@ -801,141 +807,6 @@ void Downloader::download(DownloadRequest && request, Sink & sink)
}
}
-CachedDownloadResult Downloader::downloadCached(
- ref<Store> store, const CachedDownloadRequest & request)
-{
- auto url = resolveUri(request.uri);
-
- auto name = request.name;
- if (name == "") {
- auto p = url.rfind('/');
- if (p != string::npos) name = string(url, p + 1);
- }
-
- std::optional<StorePath> expectedStorePath;
- if (request.expectedHash) {
- auto method = request.unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
- expectedStorePath = store->makeFixedOutputPath(method, request.expectedHash, name);
- if (store->isValidPath(*expectedStorePath)) {
- CachedDownloadResult result;
- result.storePath = store->printStorePath(*expectedStorePath);
- result.path = store->toRealPath(result.storePath);
- return result;
- }
- }
-
- Path cacheDir = getCacheDir() + "/nix/tarballs";
- createDirs(cacheDir);
-
- string urlHash = hashString(htSHA256, name + std::string("\0"s) + url).to_string(Base32, false);
-
- Path dataFile = cacheDir + "/" + urlHash + ".info";
- Path fileLink = cacheDir + "/" + urlHash + "-file";
-
- PathLocks lock({fileLink}, fmt("waiting for lock on '%1%'...", fileLink));
-
- std::optional<StorePath> storePath;
-
- string expectedETag;
-
- bool skip = false;
-
- CachedDownloadResult result;
-
- if (pathExists(fileLink) && pathExists(dataFile)) {
- storePath = store->parseStorePath(readLink(fileLink));
- // FIXME
- store->addTempRoot(*storePath);
- if (store->isValidPath(*storePath)) {
- auto ss = tokenizeString<vector<string>>(readFile(dataFile), "\n");
- if (ss.size() >= 3 && ss[0] == url) {
- time_t lastChecked;
- if (string2Int(ss[2], lastChecked) && (uint64_t) lastChecked + request.ttl >= (uint64_t) time(0)) {
- skip = true;
- result.effectiveUri = request.uri;
- result.etag = ss[1];
- } else if (!ss[1].empty()) {
- debug(format("verifying previous ETag '%1%'") % ss[1]);
- expectedETag = ss[1];
- }
- }
- } else
- storePath.reset();
- }
-
- if (!skip) {
-
- try {
- DownloadRequest request2(url);
- request2.expectedETag = expectedETag;
- auto res = download(request2);
- result.effectiveUri = res.effectiveUri;
- result.etag = res.etag;
-
- if (!res.cached) {
- StringSink sink;
- dumpString(*res.data, sink);
- Hash hash = hashString(request.expectedHash ? request.expectedHash.type : htSHA256, *res.data);
- ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
- info.narHash = hashString(htSHA256, *sink.s);
- info.narSize = sink.s->size();
- info.ca = makeFixedOutputCA(FileIngestionMethod::Flat, hash);
- store->addToStore(info, sink.s, NoRepair, NoCheckSigs);
- storePath = info.path.clone();
- }
-
- assert(storePath);
- replaceSymlink(store->printStorePath(*storePath), fileLink);
-
- writeFile(dataFile, url + "\n" + res.etag + "\n" + std::to_string(time(0)) + "\n");
- } catch (DownloadError & e) {
- if (!storePath) throw;
- warn("warning: %s; using cached result", e.msg());
- result.etag = expectedETag;
- }
- }
-
- if (request.unpack) {
- Path unpackedLink = cacheDir + "/" + ((std::string) storePath->to_string()) + "-unpacked";
- PathLocks lock2({unpackedLink}, fmt("waiting for lock on '%1%'...", unpackedLink));
- std::optional<StorePath> unpackedStorePath;
- if (pathExists(unpackedLink)) {
- unpackedStorePath = store->parseStorePath(readLink(unpackedLink));
- // FIXME
- store->addTempRoot(*unpackedStorePath);
- if (!store->isValidPath(*unpackedStorePath))
- unpackedStorePath.reset();
- }
- if (!unpackedStorePath) {
- printInfo("unpacking '%s'...", url);
- Path tmpDir = createTempDir();
- AutoDelete autoDelete(tmpDir, true);
- unpackTarfile(store->toRealPath(store->printStorePath(*storePath)), tmpDir);
- auto members = readDirectory(tmpDir);
- if (members.size() != 1)
- throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
- auto topDir = tmpDir + "/" + members.begin()->name;
- unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair);
- }
- replaceSymlink(store->printStorePath(*unpackedStorePath), unpackedLink);
- storePath = std::move(*unpackedStorePath);
- }
-
- if (expectedStorePath && *storePath != *expectedStorePath) {
- unsigned int statusCode = 102;
- Hash gotHash = request.unpack
- ? hashPath(request.expectedHash.type, store->toRealPath(store->printStorePath(*storePath))).first
- : hashFile(request.expectedHash.type, store->toRealPath(store->printStorePath(*storePath)));
- throw nix::Error(statusCode, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
- url, request.expectedHash.to_string(), gotHash.to_string());
- }
-
- result.storePath = store->printStorePath(*storePath);
- result.path = store->toRealPath(result.storePath);
- return result;
-}
-
-
bool isUri(const string & s)
{
if (s.compare(0, 8, "channel:") == 0) return true;
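The retry logic above is unchanged apart from the renames: a failed transfer is re-enqueued with an exponentially growing, jittered delay. The formula, extracted into a standalone sketch (baseRetryTimeMs, attempt and the mt19937 generator correspond to the members used above; the function name is illustrative):

    #include <chrono>
    #include <cmath>
    #include <random>

    // Delay before retry number 'attempt' (1-based):
    // base * 2^(attempt - 1 + jitter), with jitter uniform in [0, 0.5).
    std::chrono::milliseconds retryDelay(unsigned int baseRetryTimeMs,
        unsigned int attempt, std::mt19937 & rng)
    {
        double jitter = std::uniform_real_distribution<>(0.0, 0.5)(rng);
        int ms = baseRetryTimeMs * std::pow(2.0, attempt - 1 + jitter);
        return std::chrono::milliseconds(ms);
    }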
diff --git a/src/libstore/download.hh b/src/libstore/filetransfer.hh
index 5a131c704..2347f363d 100644
--- a/src/libstore/download.hh
+++ b/src/libstore/filetransfer.hh
@@ -9,7 +9,7 @@
namespace nix {
-struct DownloadSettings : Config
+struct FileTransferSettings : Config
{
Setting<bool> enableHttp2{this, true, "http2",
"Whether to enable HTTP/2 support."};
@@ -31,15 +31,15 @@ struct DownloadSettings : Config
"How often Nix will attempt to download a file before giving up."};
};
-extern DownloadSettings downloadSettings;
+extern FileTransferSettings fileTransferSettings;
-struct DownloadRequest
+struct FileTransferRequest
{
std::string uri;
std::string expectedETag;
bool verifyTLS = true;
bool head = false;
- size_t tries = downloadSettings.tries;
+ size_t tries = fileTransferSettings.tries;
unsigned int baseRetryTimeMs = 250;
ActivityId parentAct;
bool decompress = true;
@@ -47,7 +47,7 @@ struct DownloadRequest
std::string mimeType;
std::function<void(char *, size_t)> dataCallback;
- DownloadRequest(const std::string & uri)
+ FileTransferRequest(const std::string & uri)
: uri(uri), parentAct(getCurActivity()) { }
std::string verb()
@@ -56,7 +56,7 @@ struct DownloadRequest
}
};
-struct DownloadResult
+struct FileTransferResult
{
bool cached = false;
std::string etag;
@@ -65,74 +65,52 @@ struct DownloadResult
uint64_t bodySize = 0;
};
-struct CachedDownloadRequest
-{
- std::string uri;
- bool unpack = false;
- std::string name;
- Hash expectedHash;
- unsigned int ttl;
-
- CachedDownloadRequest(const std::string & uri);
- CachedDownloadRequest() = delete;
-};
-
-struct CachedDownloadResult
-{
- // Note: 'storePath' may be different from 'path' when using a
- // chroot store.
- Path storePath;
- Path path;
- std::optional<std::string> etag;
- std::string effectiveUri;
-};
-
class Store;
-struct Downloader
+struct FileTransfer
{
- virtual ~Downloader() { }
+ virtual ~FileTransfer() { }
- /* Enqueue a download request, returning a future to the result of
- the download. The future may throw a DownloadError
+ /* Enqueue a data transfer request, returning a future to the result of
+ the download. The future may throw a FileTransferError
exception. */
- virtual void enqueueDownload(const DownloadRequest & request,
- Callback<DownloadResult> callback) = 0;
+ virtual void enqueueFileTransfer(const FileTransferRequest & request,
+ Callback<FileTransferResult> callback) = 0;
- std::future<DownloadResult> enqueueDownload(const DownloadRequest & request);
+ std::future<FileTransferResult> enqueueFileTransfer(const FileTransferRequest & request);
/* Synchronously download a file. */
- DownloadResult download(const DownloadRequest & request);
+ FileTransferResult download(const FileTransferRequest & request);
+
+ /* Synchronously upload a file. */
+ FileTransferResult upload(const FileTransferRequest & request);
/* Download a file, writing its data to a sink. The sink will be
invoked on the thread of the caller. */
- void download(DownloadRequest && request, Sink & sink);
-
- /* Check if the specified file is already in ~/.cache/nix/tarballs
- and is more recent than ‘tarball-ttl’ seconds. Otherwise,
- use the recorded ETag to verify if the server has a more
- recent version, and if so, download it to the Nix store. */
- CachedDownloadResult downloadCached(ref<Store> store, const CachedDownloadRequest & request);
+ void download(FileTransferRequest && request, Sink & sink);
enum Error { NotFound, Forbidden, Misc, Transient, Interrupted };
};
-/* Return a shared Downloader object. Using this object is preferred
+/* Return a shared FileTransfer object. Using this object is preferred
because it enables connection reuse and HTTP/2 multiplexing. */
-ref<Downloader> getDownloader();
+ref<FileTransfer> getFileTransfer();
-/* Return a new Downloader object. */
-ref<Downloader> makeDownloader();
+/* Return a new FileTransfer object. */
+ref<FileTransfer> makeFileTransfer();
-class DownloadError : public Error
+class FileTransferError : public Error
{
public:
- Downloader::Error error;
- DownloadError(Downloader::Error error, const FormatOrString & fs)
+ FileTransfer::Error error;
+ FileTransferError(FileTransfer::Error error, const FormatOrString & fs)
: Error(fs), error(error)
{ }
};
bool isUri(const string & s);
+/* Resolve deprecated 'channel:<foo>' URLs. */
+std::string resolveUri(const std::string & uri);
+
}
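For callers, the renamed interface above keeps the old shape: build a request, hand it to the shared instance, read the result. A usage sketch, assuming FileTransferResult keeps the data member of the old DownloadResult (that field is outside the hunks shown) and with made-up URLs:

    #include "filetransfer.hh"

    using namespace nix;

    void exampleTransfers()
    {
        // Synchronous download through the shared, connection-reusing instance.
        FileTransferRequest request("https://cache.example.org/nix-cache-info");
        FileTransferResult result = getFileTransfer()->download(request);
        // result.data holds the response body; result.etag the ETag, if any.

        // Uploads now go through upload(), the readability alias added above.
        FileTransferRequest put("https://cache.example.org/log/test");
        put.data = std::make_shared<std::string>("payload");
        put.mimeType = "text/plain";
        getFileTransfer()->upload(put);
    }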
diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc
index 0c3d89611..95a4bc934 100644
--- a/src/libstore/gc.cc
+++ b/src/libstore/gc.cc
@@ -202,6 +202,11 @@ void LocalStore::findTempRoots(FDs & fds, Roots & tempRoots, bool censor)
/* Read the `temproots' directory for per-process temporary root
files. */
for (auto & i : readDirectory(tempRootsDir)) {
+ if (i.name[0] == '.') {
+ // Ignore hidden files. Some package managers (notably portage) create
+ // those to keep the directory alive.
+ continue;
+ }
Path path = tempRootsDir + "/" + i.name;
pid_t pid = std::stoi(i.name);
@@ -414,7 +419,7 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
try {
auto mapFile = fmt("/proc/%s/maps", ent->d_name);
- auto mapLines = tokenizeString<std::vector<string>>(readFile(mapFile, true), "\n");
+ auto mapLines = tokenizeString<std::vector<string>>(readFile(mapFile), "\n");
for (const auto & line : mapLines) {
auto match = std::smatch{};
if (std::regex_match(line, match, mapRegex))
@@ -422,7 +427,7 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
}
auto envFile = fmt("/proc/%s/environ", ent->d_name);
- auto envString = readFile(envFile, true);
+ auto envString = readFile(envFile);
auto env_end = std::sregex_iterator{};
for (auto i = std::sregex_iterator{envString.begin(), envString.end(), storePathRegex}; i != env_end; ++i)
unchecked[i->str()].emplace(envFile);
@@ -884,7 +889,7 @@ void LocalStore::autoGC(bool sync)
if (statvfs(realStoreDir.c_str(), &st))
throw SysError("getting filesystem info about '%s'", realStoreDir);
- return (uint64_t) st.f_bavail * st.f_bsize;
+ return (uint64_t) st.f_bavail * st.f_frsize;
};
std::shared_future<void> future;
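The last gc.cc hunk fixes the free-space calculation for autoGC(): f_bavail counts blocks of f_frsize (the fragment size), not f_bsize, so multiplying by f_bsize mis-reports space on filesystems where the two differ. The corrected computation as a standalone sketch:

    #include <sys/statvfs.h>
    #include <cstdint>
    #include <stdexcept>
    #include <string>

    // Bytes available to unprivileged processes on the filesystem holding 'path'.
    uint64_t availableBytes(const std::string & path)
    {
        struct statvfs st;
        if (statvfs(path.c_str(), &st))
            throw std::runtime_error("getting filesystem info about '" + path + "'");
        return (uint64_t) st.f_bavail * st.f_frsize;
    }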
diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index 7e97f3c22..bee94cbd8 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -31,6 +31,7 @@ Settings::Settings()
, nixLogDir(canonPath(getEnv("NIX_LOG_DIR").value_or(NIX_LOG_DIR)))
, nixStateDir(canonPath(getEnv("NIX_STATE_DIR").value_or(NIX_STATE_DIR)))
, nixConfDir(canonPath(getEnv("NIX_CONF_DIR").value_or(NIX_CONF_DIR)))
+ , nixUserConfFiles(getUserConfigFiles())
, nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR").value_or(NIX_LIBEXEC_DIR)))
, nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR)))
, nixManDir(canonPath(NIX_MAN_DIR))
@@ -77,11 +78,27 @@ void loadConfFile()
~/.nix/nix.conf or the command line. */
globalConfig.resetOverriden();
+ auto files = settings.nixUserConfFiles;
+ for (auto file = files.rbegin(); file != files.rend(); file++) {
+ globalConfig.applyConfigFile(*file);
+ }
+}
+
+std::vector<Path> getUserConfigFiles()
+{
+ // Use the paths specified in NIX_USER_CONF_FILES if it has been defined
+ auto nixConfFiles = getEnv("NIX_USER_CONF_FILES");
+ if (nixConfFiles.has_value()) {
+ return tokenizeString<std::vector<string>>(nixConfFiles.value(), ":");
+ }
+
+ // Use the paths specified by the XDG spec
+ std::vector<Path> files;
auto dirs = getConfigDirs();
- // Iterate over them in reverse so that the ones appearing first in the path take priority
- for (auto dir = dirs.rbegin(); dir != dirs.rend(); dir++) {
- globalConfig.applyConfigFile(*dir + "/nix/nix.conf");
+ for (auto & dir : dirs) {
+ files.insert(files.end(), dir + "/nix/nix.conf");
}
+ return files;
}
unsigned int Settings::getDefaultCores()
@@ -113,7 +130,7 @@ bool Settings::isExperimentalFeatureEnabled(const std::string & name)
void Settings::requireExperimentalFeature(const std::string & name)
{
if (!isExperimentalFeatureEnabled(name))
- throw Error("experimental Nix feature '%s' is disabled", name);
+ throw Error("experimental Nix feature '%1%' is disabled; use '--experimental-features %1%' to override", name);
}
bool Settings::isWSL1()
@@ -150,21 +167,24 @@ template<> void BaseSetting<SandboxMode>::toJSON(JSONPlaceholder & out)
template<> void BaseSetting<SandboxMode>::convertToArg(Args & args, const std::string & category)
{
- args.mkFlag()
- .longName(name)
- .description("Enable sandboxing.")
- .handler([=](std::vector<std::string> ss) { override(smEnabled); })
- .category(category);
- args.mkFlag()
- .longName("no-" + name)
- .description("Disable sandboxing.")
- .handler([=](std::vector<std::string> ss) { override(smDisabled); })
- .category(category);
- args.mkFlag()
- .longName("relaxed-" + name)
- .description("Enable sandboxing, but allow builds to disable it.")
- .handler([=](std::vector<std::string> ss) { override(smRelaxed); })
- .category(category);
+ args.addFlag({
+ .longName = name,
+ .description = "Enable sandboxing.",
+ .category = category,
+ .handler = {[=]() { override(smEnabled); }}
+ });
+ args.addFlag({
+ .longName = "no-" + name,
+ .description = "Disable sandboxing.",
+ .category = category,
+ .handler = {[=]() { override(smDisabled); }}
+ });
+ args.addFlag({
+ .longName = "relaxed-" + name,
+ .description = "Enable sandboxing, but allow builds to disable it.",
+ .category = category,
+ .handler = {[=]() { override(smRelaxed); }}
+ });
}
void MaxBuildJobsSetting::set(const std::string & str)
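The getUserConfigFiles() hunk earlier in this file makes the user config locations explicit: NIX_USER_CONF_FILES, a colon-separated list, takes precedence; otherwise each XDG config directory contributes <dir>/nix/nix.conf. loadConfFile() applies the list in reverse, so the first file listed is applied last and wins. A sketch of the same resolution outside of Settings, with plain getenv and string splitting standing in for getEnv/tokenizeString:

    #include <cstdlib>
    #include <sstream>
    #include <string>
    #include <vector>

    std::vector<std::string> userConfigFiles(const std::vector<std::string> & xdgConfigDirs)
    {
        // NIX_USER_CONF_FILES overrides the XDG-derived defaults entirely.
        if (const char * env = std::getenv("NIX_USER_CONF_FILES")) {
            std::vector<std::string> files;
            std::stringstream ss(env);
            for (std::string item; std::getline(ss, item, ':');)
                files.push_back(item);
            return files;
        }
        std::vector<std::string> files;
        for (auto & dir : xdgConfigDirs)
            files.push_back(dir + "/nix/nix.conf");
        return files;
    }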
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 3aa3653f3..da95fd3ae 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -53,9 +53,12 @@ public:
/* The directory where state is stored. */
Path nixStateDir;
- /* The directory where configuration files are stored. */
+ /* The directory where system configuration files are stored. */
Path nixConfDir;
+ /* A list of user configuration files to load. */
+ std::vector<Path> nixUserConfFiles;
+
/* The directory where internal helper programs are stored. */
Path nixLibexecDir;
@@ -351,12 +354,21 @@ public:
Setting<Paths> pluginFiles{this, {}, "plugin-files",
"Plugins to dynamically load at nix initialization time."};
+ Setting<std::string> githubAccessToken{this, "", "github-access-token",
+ "GitHub access token to get access to GitHub data through the GitHub API for github:<..> flakes."};
+
Setting<Strings> experimentalFeatures{this, {}, "experimental-features",
"Experimental Nix features to enable."};
bool isExperimentalFeatureEnabled(const std::string & name);
void requireExperimentalFeature(const std::string & name);
+
+ Setting<bool> allowDirty{this, true, "allow-dirty",
+ "Whether to allow dirty Git/Mercurial trees."};
+
+ Setting<bool> warnDirty{this, true, "warn-dirty",
+ "Whether to warn about dirty Git/Mercurial trees."};
};
@@ -369,6 +381,9 @@ void initPlugins();
void loadConfFile();
+// Used by the Settings constructor
+std::vector<Path> getUserConfigFiles();
+
extern const string nixVersion;
}
diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index 011794c62..451a64785 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -1,5 +1,5 @@
#include "binary-cache-store.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "globals.hh"
#include "nar-info-disk-cache.hh"
@@ -85,14 +85,14 @@ protected:
checkEnabled();
try {
- DownloadRequest request(cacheUri + "/" + path);
+ FileTransferRequest request(cacheUri + "/" + path);
request.head = true;
- getDownloader()->download(request);
+ getFileTransfer()->download(request);
return true;
- } catch (DownloadError & e) {
+ } catch (FileTransferError & e) {
/* S3 buckets return 403 if a file doesn't exist and the
bucket is unlistable, so treat 403 as 404. */
- if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden)
+ if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
return false;
maybeDisable();
throw;
@@ -103,19 +103,19 @@ protected:
const std::string & data,
const std::string & mimeType) override
{
- auto req = DownloadRequest(cacheUri + "/" + path);
+ auto req = FileTransferRequest(cacheUri + "/" + path);
req.data = std::make_shared<string>(data); // FIXME: inefficient
req.mimeType = mimeType;
try {
- getDownloader()->download(req);
- } catch (DownloadError & e) {
+ getFileTransfer()->upload(req);
+ } catch (FileTransferError & e) {
throw UploadToHTTP("while uploading to HTTP binary cache at '%s': %s", cacheUri, e.msg());
}
}
- DownloadRequest makeRequest(const std::string & path)
+ FileTransferRequest makeRequest(const std::string & path)
{
- DownloadRequest request(cacheUri + "/" + path);
+ FileTransferRequest request(cacheUri + "/" + path);
return request;
}
@@ -124,9 +124,9 @@ protected:
checkEnabled();
auto request(makeRequest(path));
try {
- getDownloader()->download(std::move(request), sink);
- } catch (DownloadError & e) {
- if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden)
+ getFileTransfer()->download(std::move(request), sink);
+ } catch (FileTransferError & e) {
+ if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache '%s'", path, getUri());
maybeDisable();
throw;
@@ -142,12 +142,12 @@ protected:
auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));
- getDownloader()->enqueueDownload(request,
- {[callbackPtr, this](std::future<DownloadResult> result) {
+ getFileTransfer()->enqueueFileTransfer(request,
+ {[callbackPtr, this](std::future<FileTransferResult> result) {
try {
(*callbackPtr)(result.get().data);
- } catch (DownloadError & e) {
- if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden)
+ } catch (FileTransferError & e) {
+ if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
return (*callbackPtr)(std::shared_ptr<std::string>());
maybeDisable();
callbackPtr->rethrow();
@@ -174,4 +174,3 @@ static RegisterStoreImplementation regStore([](
});
}
-
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
index f2e4b63e0..b24e7b7d6 100644
--- a/src/libstore/s3-binary-cache-store.cc
+++ b/src/libstore/s3-binary-cache-store.cc
@@ -6,7 +6,7 @@
#include "nar-info-disk-cache.hh"
#include "globals.hh"
#include "compression.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "istringstream_nocopy.hh"
#include <aws/core/Aws.h>
@@ -132,7 +132,7 @@ ref<Aws::Client::ClientConfiguration> S3Helper::makeConfig(const string & region
return res;
}
-S3Helper::DownloadResult S3Helper::getObject(
+S3Helper::FileTransferResult S3Helper::getObject(
const std::string & bucketName, const std::string & key)
{
debug("fetching 's3://%s/%s'...", bucketName, key);
@@ -146,7 +146,7 @@ S3Helper::DownloadResult S3Helper::getObject(
return Aws::New<std::stringstream>("STRINGSTREAM");
});
- DownloadResult res;
+ FileTransferResult res;
auto now1 = std::chrono::steady_clock::now();
diff --git a/src/libstore/s3.hh b/src/libstore/s3.hh
index ef5f23d0f..2042bffcf 100644
--- a/src/libstore/s3.hh
+++ b/src/libstore/s3.hh
@@ -18,13 +18,13 @@ struct S3Helper
ref<Aws::Client::ClientConfiguration> makeConfig(const std::string & region, const std::string & scheme, const std::string & endpoint);
- struct DownloadResult
+ struct FileTransferResult
{
std::shared_ptr<std::string> data;
unsigned int durationMs;
};
- DownloadResult getObject(
+ FileTransferResult getObject(
const std::string & bucketName, const std::string & key);
};
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index ca1409588..86cb20a26 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -6,6 +6,7 @@
#include "thread-pool.hh"
#include "json.hh"
#include "derivations.hh"
+#include "url.hh"
#include <future>
@@ -40,7 +41,7 @@ Path Store::followLinksToStore(std::string_view _path) const
path = absPath(target, dirOf(path));
}
if (!isInStore(path))
- throw Error(format("path '%1%' is not in the Nix store") % path);
+ throw NotInStore("path '%1%' is not in the Nix store", path);
return path;
}
@@ -873,27 +874,7 @@ std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri_
Store::Params params;
auto q = uri.find('?');
if (q != std::string::npos) {
- for (auto s : tokenizeString<Strings>(uri.substr(q + 1), "&")) {
- auto e = s.find('=');
- if (e != std::string::npos) {
- auto value = s.substr(e + 1);
- std::string decoded;
- for (size_t i = 0; i < value.size(); ) {
- if (value[i] == '%') {
- if (i + 2 >= value.size())
- throw Error("invalid URI parameter '%s'", value);
- try {
- decoded += std::stoul(std::string(value, i + 1, 2), 0, 16);
- i += 3;
- } catch (...) {
- throw Error("invalid URI parameter '%s'", value);
- }
- } else
- decoded += value[i++];
- }
- params[s.substr(0, e)] = decoded;
- }
- }
+ params = decodeQuery(uri.substr(q + 1));
uri = uri_.substr(0, q);
}
return {uri, params};
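splitUriAndParams() now delegates query-string parsing and percent-decoding to decodeQuery() from the new libutil url module (see src/libutil/url.cc in the diffstat); its return type is inferred here from the assignment to Store::Params. Roughly:

    #include "url.hh"

    void exampleQuery()
    {
        // "trusted=1&path=%2Fnix%2Fstore" -> {"trusted": "1", "path": "/nix/store"},
        // replacing the hand-rolled %XX decoding loop removed above.
        auto params = nix::decodeQuery("trusted=1&path=%2Fnix%2Fstore");
    }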
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 32c24c500..3def209a6 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -28,6 +28,7 @@ MakeError(InvalidPath, Error);
MakeError(Unsupported, Error);
MakeError(SubstituteGone, Error);
MakeError(SubstituterDisabled, Error);
+MakeError(NotInStore, Error);
struct BasicDerivation;
diff --git a/src/libutil/ansicolor.hh b/src/libutil/ansicolor.hh
new file mode 100644
index 000000000..8ae07b092
--- /dev/null
+++ b/src/libutil/ansicolor.hh
@@ -0,0 +1,15 @@
+#pragma once
+
+namespace nix {
+
+/* Some ANSI escape sequences. */
+#define ANSI_NORMAL "\e[0m"
+#define ANSI_BOLD "\e[1m"
+#define ANSI_FAINT "\e[2m"
+#define ANSI_ITALIC "\e[3m"
+#define ANSI_RED "\e[31;1m"
+#define ANSI_GREEN "\e[32;1m"
+#define ANSI_YELLOW "\e[33;1m"
+#define ANSI_BLUE "\e[34;1m"
+
+}
diff --git a/src/libutil/args.cc b/src/libutil/args.cc
index ba15ea571..f829415d1 100644
--- a/src/libutil/args.cc
+++ b/src/libutil/args.cc
@@ -3,16 +3,14 @@
namespace nix {
-Args::FlagMaker Args::mkFlag()
-{
- return FlagMaker(*this);
-}
-
-Args::FlagMaker::~FlagMaker()
+void Args::addFlag(Flag && flag_)
{
+ auto flag = std::make_shared<Flag>(std::move(flag_));
+ if (flag->handler.arity != ArityAny)
+ assert(flag->handler.arity == flag->labels.size());
assert(flag->longName != "");
- args.longFlags[flag->longName] = flag;
- if (flag->shortName) args.shortFlags[flag->shortName] = flag;
+ longFlags[flag->longName] = flag;
+ if (flag->shortName) shortFlags[flag->shortName] = flag;
}
void Args::parseCmdline(const Strings & _cmdline)
@@ -61,7 +59,7 @@ void Args::parseCmdline(const Strings & _cmdline)
void Args::printHelp(const string & programName, std::ostream & out)
{
- std::cout << "Usage: " << programName << " <FLAGS>...";
+ std::cout << fmt(ANSI_BOLD "Usage:" ANSI_NORMAL " %s " ANSI_ITALIC "FLAGS..." ANSI_NORMAL, programName);
for (auto & exp : expectedArgs) {
std::cout << renderLabels({exp.label});
// FIXME: handle arity > 1
@@ -72,11 +70,11 @@ void Args::printHelp(const string & programName, std::ostream & out)
auto s = description();
if (s != "")
- std::cout << "\nSummary: " << s << ".\n";
+ std::cout << "\n" ANSI_BOLD "Summary:" ANSI_NORMAL " " << s << ".\n";
if (longFlags.size()) {
std::cout << "\n";
- std::cout << "Flags:\n";
+ std::cout << ANSI_BOLD "Flags:" ANSI_NORMAL "\n";
printFlags(out);
}
}
@@ -101,15 +99,14 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
auto process = [&](const std::string & name, const Flag & flag) -> bool {
++pos;
std::vector<std::string> args;
- for (size_t n = 0 ; n < flag.arity; ++n) {
+ for (size_t n = 0 ; n < flag.handler.arity; ++n) {
if (pos == end) {
- if (flag.arity == ArityAny) break;
- throw UsageError(format("flag '%1%' requires %2% argument(s)")
- % name % flag.arity);
+ if (flag.handler.arity == ArityAny) break;
+ throw UsageError("flag '%s' requires %d argument(s)", name, flag.handler.arity);
}
args.push_back(*pos++);
}
- flag.handler(std::move(args));
+ flag.handler.fun(std::move(args));
return true;
};
@@ -157,17 +154,18 @@ bool Args::processArgs(const Strings & args, bool finish)
return res;
}
-Args::FlagMaker & Args::FlagMaker::mkHashTypeFlag(HashType * ht)
+Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht)
{
- arity(1);
- label("type");
- description("hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')");
- handler([ht](std::string s) {
- *ht = parseHashType(s);
- if (*ht == htUnknown)
- throw UsageError("unknown hash type '%1%'", s);
- });
- return *this;
+ return Flag {
+ .longName = std::move(longName),
+ .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')",
+ .labels = {"hash-algo"},
+ .handler = {[ht](std::string s) {
+ *ht = parseHashType(s);
+ if (*ht == htUnknown)
+ throw UsageError("unknown hash type '%1%'", s);
+ }}
+ };
}
Strings argvToStrings(int argc, char * * argv)
@@ -183,7 +181,7 @@ std::string renderLabels(const Strings & labels)
std::string res;
for (auto label : labels) {
for (auto & c : label) c = std::toupper(c);
- res += " <" + label + ">";
+ res += " " ANSI_ITALIC + label + ANSI_NORMAL;
}
return res;
}
@@ -192,10 +190,10 @@ void printTable(std::ostream & out, const Table2 & table)
{
size_t max = 0;
for (auto & row : table)
- max = std::max(max, row.first.size());
+ max = std::max(max, filterANSIEscapes(row.first, true).size());
for (auto & row : table) {
out << " " << row.first
- << std::string(max - row.first.size() + 2, ' ')
+ << std::string(max - filterANSIEscapes(row.first, true).size() + 2, ' ')
<< row.second << "\n";
}
}
@@ -206,8 +204,7 @@ void Command::printHelp(const string & programName, std::ostream & out)
auto exs = examples();
if (!exs.empty()) {
- out << "\n";
- out << "Examples:\n";
+ out << "\n" ANSI_BOLD "Examples:" ANSI_NORMAL "\n";
for (auto & ex : exs)
out << "\n"
<< " " << ex.description << "\n" // FIXME: wrap
@@ -223,49 +220,55 @@ MultiCommand::MultiCommand(const Commands & commands)
auto i = commands.find(ss[0]);
if (i == commands.end())
throw UsageError("'%s' is not a recognised command", ss[0]);
- command = i->second();
- command->_name = ss[0];
+ command = {ss[0], i->second()};
}});
+
+ categories[Command::catDefault] = "Available commands";
}
void MultiCommand::printHelp(const string & programName, std::ostream & out)
{
if (command) {
- command->printHelp(programName + " " + command->name(), out);
+ command->second->printHelp(programName + " " + command->first, out);
return;
}
- out << "Usage: " << programName << " <COMMAND> <FLAGS>... <ARGS>...\n";
+ out << fmt(ANSI_BOLD "Usage:" ANSI_NORMAL " %s " ANSI_ITALIC "COMMAND FLAGS... ARGS..." ANSI_NORMAL "\n", programName);
- out << "\n";
- out << "Common flags:\n";
+ out << "\n" ANSI_BOLD "Common flags:" ANSI_NORMAL "\n";
printFlags(out);
- out << "\n";
- out << "Available commands:\n";
+ std::map<Command::Category, std::map<std::string, ref<Command>>> commandsByCategory;
- Table2 table;
- for (auto & i : commands) {
- auto command = i.second();
- command->_name = i.first;
- auto descr = command->description();
- if (!descr.empty())
- table.push_back(std::make_pair(command->name(), descr));
+ for (auto & [name, commandFun] : commands) {
+ auto command = commandFun();
+ commandsByCategory[command->category()].insert_or_assign(name, command);
+ }
+
+ for (auto & [category, commands] : commandsByCategory) {
+ out << fmt("\n" ANSI_BOLD "%s:" ANSI_NORMAL "\n", categories[category]);
+
+ Table2 table;
+ for (auto & [name, command] : commands) {
+ auto descr = command->description();
+ if (!descr.empty())
+ table.push_back(std::make_pair(name, descr));
+ }
+ printTable(out, table);
}
- printTable(out, table);
}
bool MultiCommand::processFlag(Strings::iterator & pos, Strings::iterator end)
{
if (Args::processFlag(pos, end)) return true;
- if (command && command->processFlag(pos, end)) return true;
+ if (command && command->second->processFlag(pos, end)) return true;
return false;
}
bool MultiCommand::processArgs(const Strings & args, bool finish)
{
if (command)
- return command->processArgs(args, finish);
+ return command->second->processArgs(args, finish);
else
return Args::processArgs(args, finish);
}
diff --git a/src/libutil/args.hh b/src/libutil/args.hh
index 967efbe1c..1932e6a8a 100644
--- a/src/libutil/args.hh
+++ b/src/libutil/args.hh
@@ -32,13 +32,59 @@ protected:
struct Flag
{
typedef std::shared_ptr<Flag> ptr;
+
+ struct Handler
+ {
+ std::function<void(std::vector<std::string>)> fun;
+ size_t arity;
+
+ Handler() {}
+
+ Handler(std::function<void(std::vector<std::string>)> && fun)
+ : fun(std::move(fun))
+ , arity(ArityAny)
+ { }
+
+ Handler(std::function<void()> && handler)
+ : fun([handler{std::move(handler)}](std::vector<std::string>) { handler(); })
+ , arity(0)
+ { }
+
+ Handler(std::function<void(std::string)> && handler)
+ : fun([handler{std::move(handler)}](std::vector<std::string> ss) {
+ handler(std::move(ss[0]));
+ })
+ , arity(1)
+ { }
+
+ Handler(std::function<void(std::string, std::string)> && handler)
+ : fun([handler{std::move(handler)}](std::vector<std::string> ss) {
+ handler(std::move(ss[0]), std::move(ss[1]));
+ })
+ , arity(2)
+ { }
+
+ template<class T>
+ Handler(T * dest)
+ : fun([=](std::vector<std::string> ss) { *dest = ss[0]; })
+ , arity(1)
+ { }
+
+ template<class T>
+ Handler(T * dest, const T & val)
+ : fun([=](std::vector<std::string> ss) { *dest = val; })
+ , arity(0)
+ { }
+ };
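+
+    /* Usage sketch (illustrative): a Handler is constructed implicitly from
+       a lambda taking zero, one or two string arguments, or from a
+       destination pointer; the arity is inferred from the chosen overload,
+       e.g. `.handler = {&someMember}` (arity 1) or
+       `.handler = {[&]() { ... }}` (arity 0). */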
+
std::string longName;
char shortName = 0;
std::string description;
- Strings labels;
- size_t arity = 0;
- std::function<void(std::vector<std::string>)> handler;
std::string category;
+ Strings labels;
+ Handler handler;
+
+ static Flag mkHashTypeFlag(std::string && longName, HashType * ht);
};
std::map<std::string, Flag::ptr> longFlags;
@@ -65,49 +111,7 @@ protected:
public:
- class FlagMaker
- {
- Args & args;
- Flag::ptr flag;
- friend class Args;
- FlagMaker(Args & args) : args(args), flag(std::make_shared<Flag>()) { }
- public:
- ~FlagMaker();
- FlagMaker & longName(const std::string & s) { flag->longName = s; return *this; }
- FlagMaker & shortName(char s) { flag->shortName = s; return *this; }
- FlagMaker & description(const std::string & s) { flag->description = s; return *this; }
- FlagMaker & label(const std::string & l) { flag->arity = 1; flag->labels = {l}; return *this; }
- FlagMaker & labels(const Strings & ls) { flag->arity = ls.size(); flag->labels = ls; return *this; }
- FlagMaker & arity(size_t arity) { flag->arity = arity; return *this; }
- FlagMaker & handler(std::function<void(std::vector<std::string>)> handler) { flag->handler = handler; return *this; }
- FlagMaker & handler(std::function<void()> handler) { flag->handler = [handler](std::vector<std::string>) { handler(); }; return *this; }
- FlagMaker & handler(std::function<void(std::string)> handler) {
- flag->arity = 1;
- flag->handler = [handler](std::vector<std::string> ss) { handler(std::move(ss[0])); };
- return *this;
- }
- FlagMaker & category(const std::string & s) { flag->category = s; return *this; }
-
- template<class T>
- FlagMaker & dest(T * dest)
- {
- flag->arity = 1;
- flag->handler = [=](std::vector<std::string> ss) { *dest = ss[0]; };
- return *this;
- }
-
- template<class T>
- FlagMaker & set(T * dest, const T & val)
- {
- flag->arity = 0;
- flag->handler = [=](std::vector<std::string> ss) { *dest = val; };
- return *this;
- }
-
- FlagMaker & mkHashTypeFlag(HashType * ht);
- };
-
- FlagMaker mkFlag();
+ void addFlag(Flag && flag);
/* Helper functions for constructing flags / positional
arguments. */
@@ -116,13 +120,13 @@ public:
const std::string & label, const std::string & description,
std::function<void(std::string)> fun)
{
- mkFlag()
- .shortName(shortName)
- .longName(longName)
- .labels({label})
- .description(description)
- .arity(1)
- .handler([=](std::vector<std::string> ss) { fun(ss[0]); });
+ addFlag({
+ .longName = longName,
+ .shortName = shortName,
+ .description = description,
+ .labels = {label},
+ .handler = {[=](std::string s) { fun(s); }}
+ });
}
void mkFlag(char shortName, const std::string & name,
@@ -135,11 +139,12 @@ public:
void mkFlag(char shortName, const std::string & longName, const std::string & description,
T * dest, const T & value)
{
- mkFlag()
- .shortName(shortName)
- .longName(longName)
- .description(description)
- .handler([=](std::vector<std::string> ss) { *dest = value; });
+ addFlag({
+ .longName = longName,
+ .shortName = shortName,
+ .description = description,
+ .handler = {[=]() { *dest = value; }}
+ });
}
template<class I>
@@ -155,18 +160,18 @@ public:
void mkFlag(char shortName, const std::string & longName,
const std::string & description, std::function<void(I)> fun)
{
- mkFlag()
- .shortName(shortName)
- .longName(longName)
- .labels({"N"})
- .description(description)
- .arity(1)
- .handler([=](std::vector<std::string> ss) {
+ addFlag({
+ .longName = longName,
+ .shortName = shortName,
+ .description = description,
+ .labels = {"N"},
+ .handler = {[=](std::string s) {
I n;
- if (!string2Int(ss[0], n))
+ if (!string2Int(s, n))
                throw UsageError("flag '--%s' requires an integer argument", longName);
fun(n);
- });
+ }}
+ });
}
/* Expect a string argument. */
@@ -192,17 +197,10 @@ public:
run() method. */
struct Command : virtual Args
{
-private:
- std::string _name;
-
friend class MultiCommand;
-public:
-
virtual ~Command() { }
- std::string name() { return _name; }
-
virtual void prepare() { };
virtual void run() = 0;
@@ -216,6 +214,12 @@ public:
virtual Examples examples() { return Examples(); }
+ typedef int Category;
+
+ static constexpr Category catDefault = 0;
+
+ virtual Category category() { return catDefault; }
+
void printHelp(const string & programName, std::ostream & out) override;
};
@@ -228,7 +232,10 @@ class MultiCommand : virtual Args
public:
Commands commands;
- std::shared_ptr<Command> command;
+ std::map<Command::Category, std::string> categories;
+
+ // Selected command, if any.
+ std::optional<std::pair<std::string, ref<Command>>> command;
MultiCommand(const Commands & commands);
diff --git a/src/libutil/config.cc b/src/libutil/config.cc
index 7551d97d1..f03e444ec 100644
--- a/src/libutil/config.cc
+++ b/src/libutil/config.cc
@@ -177,12 +177,13 @@ void BaseSetting<T>::toJSON(JSONPlaceholder & out)
template<typename T>
void BaseSetting<T>::convertToArg(Args & args, const std::string & category)
{
- args.mkFlag()
- .longName(name)
- .description(description)
- .arity(1)
- .handler([=](std::vector<std::string> ss) { overriden = true; set(ss[0]); })
- .category(category);
+ args.addFlag({
+ .longName = name,
+ .description = description,
+ .category = category,
+ .labels = {"value"},
+ .handler = {[=](std::string s) { overriden = true; set(s); }},
+ });
}
template<> void BaseSetting<std::string>::set(const std::string & str)
@@ -227,16 +228,18 @@ template<> std::string BaseSetting<bool>::to_string() const
template<> void BaseSetting<bool>::convertToArg(Args & args, const std::string & category)
{
- args.mkFlag()
- .longName(name)
- .description(description)
- .handler([=](std::vector<std::string> ss) { override(true); })
- .category(category);
- args.mkFlag()
- .longName("no-" + name)
- .description(description)
- .handler([=](std::vector<std::string> ss) { override(false); })
- .category(category);
+ args.addFlag({
+ .longName = name,
+ .description = description,
+ .category = category,
+ .handler = {[=]() { override(true); }}
+ });
+ args.addFlag({
+ .longName = "no-" + name,
+ .description = description,
+ .category = category,
+ .handler = {[=]() { override(false); }}
+ });
}
template<> void BaseSetting<Strings>::set(const std::string & str)
diff --git a/src/libutil/error.cc b/src/libutil/error.cc
new file mode 100644
index 000000000..a5571d4ec
--- /dev/null
+++ b/src/libutil/error.cc
@@ -0,0 +1,146 @@
+#include "error.hh"
+
+#include <iostream>
+#include <optional>
+
+namespace nix
+{
+
+std::optional<string> ErrorInfo::programName = std::nullopt;
+
+std::ostream& operator<<(std::ostream &os, const hintformat &hf)
+{
+ return os << hf.str();
+}
+
+string showErrPos(const ErrPos &errPos)
+{
+ if (errPos.column > 0) {
+ return fmt("(%1%:%2%)", errPos.lineNumber, errPos.column);
+ } else {
+ return fmt("(%1%)", errPos.lineNumber);
+ };
+}
+
+void printCodeLines(const string &prefix, const NixCode &nixCode)
+{
+ // previous line of code.
+ if (nixCode.prevLineOfCode.has_value()) {
+ std::cout << fmt("%1% %|2$5d|| %3%",
+ prefix,
+ (nixCode.errPos.lineNumber - 1),
+ *nixCode.prevLineOfCode)
+ << std::endl;
+ }
+
+    // line of code containing the error.
+ std::cout << fmt("%1% %|2$5d|| %3%",
+ prefix,
+ (nixCode.errPos.lineNumber),
+ nixCode.errLineOfCode)
+ << std::endl;
+
+ // error arrows for the column range.
+ if (nixCode.errPos.column > 0) {
+ int start = nixCode.errPos.column;
+ std::string spaces;
+ for (int i = 0; i < start; ++i) {
+ spaces.append(" ");
+ }
+
+ std::string arrows("^");
+
+ std::cout << fmt("%1% |%2%" ANSI_RED "%3%" ANSI_NORMAL,
+ prefix,
+ spaces,
+ arrows) << std::endl;
+ }
+
+ // next line of code.
+ if (nixCode.nextLineOfCode.has_value()) {
+ std::cout << fmt("%1% %|2$5d|| %3%",
+ prefix,
+ (nixCode.errPos.lineNumber + 1),
+ *nixCode.nextLineOfCode)
+ << std::endl;
+ }
+}
+
+void printErrorInfo(const ErrorInfo &einfo)
+{
+ int errwidth = 80;
+ string prefix = " ";
+
+ string levelString;
+ switch (einfo.level) {
+ case ErrLevel::elError: {
+ levelString = ANSI_RED;
+ levelString += "error:";
+ levelString += ANSI_NORMAL;
+ break;
+ }
+ case ErrLevel::elWarning: {
+ levelString = ANSI_YELLOW;
+ levelString += "warning:";
+ levelString += ANSI_NORMAL;
+ break;
+ }
+ default: {
+ levelString = fmt("invalid error level: %1%", einfo.level);
+ break;
+ }
+ }
+
+ int ndl = prefix.length() + levelString.length() + 3 + einfo.name.length() + einfo.programName.value_or("").length();
+ int dashwidth = ndl > (errwidth - 3) ? 3 : errwidth - ndl;
+
+ string dashes;
+ for (int i = 0; i < dashwidth; ++i)
+ dashes.append("-");
+
+ // divider.
+ std::cout << fmt("%1%%2%" ANSI_BLUE " %3% %4% %5% %6%" ANSI_NORMAL,
+ prefix,
+ levelString,
+ "---",
+ einfo.name,
+ dashes,
+ einfo.programName.value_or(""))
+ << std::endl;
+
+ // filename.
+ if (einfo.nixCode.has_value()) {
+ if (einfo.nixCode->errPos.nixFile != "") {
+ string eline = einfo.nixCode->errLineOfCode != ""
+ ? string(" ") + showErrPos(einfo.nixCode->errPos)
+ : "";
+
+ std::cout << fmt("%1%in file: " ANSI_BLUE "%2%%3%" ANSI_NORMAL,
+ prefix,
+ einfo.nixCode->errPos.nixFile,
+ eline) << std::endl;
+ std::cout << prefix << std::endl;
+ } else {
+ std::cout << fmt("%1%from command line argument", prefix) << std::endl;
+ std::cout << prefix << std::endl;
+ }
+ }
+
+ // description
+ std::cout << prefix << einfo.description << std::endl;
+ std::cout << prefix << std::endl;
+
+ // lines of code.
+    if (einfo.nixCode.has_value() && einfo.nixCode->errLineOfCode != "") {
+ printCodeLines(prefix, *einfo.nixCode);
+ std::cout << prefix << std::endl;
+ }
+
+ // hint
+ if (einfo.hint.has_value()) {
+ std::cout << prefix << *einfo.hint << std::endl;
+ std::cout << prefix << std::endl;
+ }
+}
+
+}
diff --git a/src/libutil/error.hh b/src/libutil/error.hh
new file mode 100644
index 000000000..f402b692e
--- /dev/null
+++ b/src/libutil/error.hh
@@ -0,0 +1,121 @@
+#ifndef error_hh
+#define error_hh
+
+#include "ansicolor.hh"
+#include <string>
+#include <optional>
+#include <iostream>
+#include "types.hh"
+
+namespace nix
+{
+
+typedef enum {
+ elWarning,
+ elError
+} ErrLevel;
+
+struct ErrPos
+{
+ int lineNumber;
+ int column;
+ string nixFile;
+
+ template <class P>
+ ErrPos& operator=(const P &pos)
+ {
+ lineNumber = pos.line;
+ column = pos.column;
+ nixFile = pos.file;
+ return *this;
+ }
+
+ template <class P>
+ ErrPos(const P &p)
+ {
+ *this = p;
+ }
+};
+
+struct NixCode
+{
+ ErrPos errPos;
+ std::optional<string> prevLineOfCode;
+ string errLineOfCode;
+ std::optional<string> nextLineOfCode;
+};
+
+// ----------------------------------------------------------------
+// format function for hints. same as fmt, except templated values
+// are always in yellow.
+
+template <class T>
+struct yellowify
+{
+ yellowify(T &s) : value(s) {}
+ T &value;
+};
+
+template <class T>
+std::ostream& operator<<(std::ostream &out, const yellowify<T> &y)
+{
+ return out << ANSI_YELLOW << y.value << ANSI_NORMAL;
+}
+
+class hintformat
+{
+public:
+ hintformat(string format) :fmt(format)
+ {
+ fmt.exceptions(boost::io::all_error_bits ^ boost::io::too_many_args_bit);
+ }
+ template<class T>
+ hintformat& operator%(const T &value)
+ {
+ fmt % yellowify(value);
+ return *this;
+ }
+
+ std::string str() const
+ {
+ return fmt.str();
+ }
+
+ template <typename U>
+ friend class AddHint;
+private:
+ format fmt;
+};
+
+std::ostream& operator<<(std::ostream &os, const hintformat &hf);
+
+template<typename... Args>
+inline hintformat hintfmt(const std::string & fs, const Args & ... args)
+{
+ hintformat f(fs);
+ formatHelper(f, args...);
+ return f;
+}
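+
+/* Usage sketch (illustrative): hintfmt() behaves like fmt(), except that
+   every formatted argument is wrapped in ANSI_YELLOW, e.g.
+     auto hint = hintfmt("did you mean '%1%'?", suggestion);
+   where 'suggestion' is a hypothetical std::string. */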
+
+// -------------------------------------------------
+// ErrorInfo.
+struct ErrorInfo
+{
+ ErrLevel level;
+ string name;
+ string description;
+ std::optional<hintformat> hint;
+ std::optional<NixCode> nixCode;
+
+ static std::optional<string> programName;
+};
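+
+/* Construction sketch (illustrative, using the same aggregate
+   initialisation style as elsewhere in this change):
+     printErrorInfo(ErrorInfo {
+         .level = elError,
+         .name = "example error",
+         .description = "something went wrong",
+         .hint = hintfmt("try '%1%' instead", "some-value"),
+     });
+*/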
+
+// --------------------------------------------------------
+// error printing
+
+// just to cout for now.
+void printErrorInfo(const ErrorInfo &einfo);
+
+}
+
+#endif
diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc
index fa5c84a27..3cc4ef8f1 100644
--- a/src/libutil/logging.cc
+++ b/src/libutil/logging.cc
@@ -3,6 +3,7 @@
#include <atomic>
#include <nlohmann/json.hpp>
+#include <iostream>
namespace nix {
@@ -24,6 +25,11 @@ void Logger::warn(const std::string & msg)
log(lvlWarn, ANSI_YELLOW "warning:" ANSI_NORMAL " " + msg);
}
+void Logger::writeToStdout(std::string_view s)
+{
+ std::cout << s << "\n";
+}
+
class SimpleLogger : public Logger
{
public:
@@ -198,7 +204,7 @@ bool handleJSONLogMessage(const std::string & msg,
if (action == "start") {
auto type = (ActivityType) json["type"];
- if (trusted || type == actDownload)
+ if (trusted || type == actFileTransfer)
activities.emplace(std::piecewise_construct,
std::forward_as_tuple(json["id"]),
std::forward_as_tuple(*logger, (Verbosity) json["level"], type,
diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh
index beb5e6b64..18c24d508 100644
--- a/src/libutil/logging.hh
+++ b/src/libutil/logging.hh
@@ -17,7 +17,7 @@ typedef enum {
typedef enum {
actUnknown = 0,
actCopyPath = 100,
- actDownload = 101,
+ actFileTransfer = 101,
actRealise = 102,
actCopyPaths = 103,
actBuilds = 104,
@@ -78,6 +78,16 @@ public:
virtual void stopActivity(ActivityId act) { };
virtual void result(ActivityId act, ResultType type, const Fields & fields) { };
+
+ virtual void writeToStdout(std::string_view s);
+
+ template<typename... Args>
+ inline void stdout(const std::string & fs, const Args & ... args)
+ {
+ boost::format f(fs);
+ formatHelper(f, args...);
+ writeToStdout(f.str());
+ }
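+
+    /* Usage sketch (illustrative): commands can route their primary output
+       through the logger so it can be captured or redirected, e.g.
+         logger->stdout("%s", somePath);
+       assuming the global 'logger' pointer and a hypothetical 'somePath'. */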
};
ActivityId getCurActivity();
diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh
index 5780c93a6..a04118512 100644
--- a/src/libutil/serialise.hh
+++ b/src/libutil/serialise.hh
@@ -148,6 +148,9 @@ struct StringSink : Sink
{
ref<std::string> s;
StringSink() : s(make_ref<std::string>()) { };
+ explicit StringSink(const size_t reservedSize) : s(make_ref<std::string>()) {
+ s->reserve(reservedSize);
+ };
StringSink(ref<std::string> s) : s(s) { };
void operator () (const unsigned char * data, size_t len) override;
};
diff --git a/src/libutil/tests/local.mk b/src/libutil/tests/local.mk
new file mode 100644
index 000000000..a297edb64
--- /dev/null
+++ b/src/libutil/tests/local.mk
@@ -0,0 +1,15 @@
+check: libutil-tests_RUN
+
+programs += libutil-tests
+
+libutil-tests_DIR := $(d)
+
+libutil-tests_INSTALL_DIR :=
+
+libutil-tests_SOURCES := $(wildcard $(d)/*.cc)
+
+libutil-tests_CXXFLAGS += -I src/libutil
+
+libutil-tests_LIBS = libutil
+
+libutil-tests_LDFLAGS := $(GTEST_LIBS)
diff --git a/src/libutil/tests/tests.cc b/src/libutil/tests/tests.cc
new file mode 100644
index 000000000..8e77ccbe1
--- /dev/null
+++ b/src/libutil/tests/tests.cc
@@ -0,0 +1,589 @@
+#include "util.hh"
+#include "types.hh"
+
+#include <gtest/gtest.h>
+
+namespace nix {
+
+/* ----------- tests for util.hh ------------------------------------------------*/
+
+ /* ----------------------------------------------------------------------------
+ * absPath
+ * --------------------------------------------------------------------------*/
+
+ TEST(absPath, doesntChangeRoot) {
+ auto p = absPath("/");
+
+ ASSERT_EQ(p, "/");
+ }
+
+
+
+
+ TEST(absPath, turnsEmptyPathIntoCWD) {
+ char cwd[PATH_MAX+1];
+ auto p = absPath("");
+
+ ASSERT_EQ(p, getcwd((char*)&cwd, PATH_MAX));
+ }
+
+ TEST(absPath, usesOptionalBasePathWhenGiven) {
+ char _cwd[PATH_MAX+1];
+ char* cwd = getcwd((char*)&_cwd, PATH_MAX);
+
+ auto p = absPath("", cwd);
+
+ ASSERT_EQ(p, cwd);
+ }
+
+ TEST(absPath, isIdempotent) {
+ char _cwd[PATH_MAX+1];
+ char* cwd = getcwd((char*)&_cwd, PATH_MAX);
+ auto p1 = absPath(cwd);
+ auto p2 = absPath(p1);
+
+ ASSERT_EQ(p1, p2);
+ }
+
+
+ TEST(absPath, pathIsCanonicalised) {
+ auto path = "/some/path/with/trailing/dot/.";
+ auto p1 = absPath(path);
+ auto p2 = absPath(p1);
+
+ ASSERT_EQ(p1, "/some/path/with/trailing/dot");
+ ASSERT_EQ(p1, p2);
+ }
+
+ /* ----------------------------------------------------------------------------
+ * canonPath
+ * --------------------------------------------------------------------------*/
+
+ TEST(canonPath, removesTrailingSlashes) {
+ auto path = "/this/is/a/path//";
+ auto p = canonPath(path);
+
+ ASSERT_EQ(p, "/this/is/a/path");
+ }
+
+ TEST(canonPath, removesDots) {
+ auto path = "/this/./is/a/path/./";
+ auto p = canonPath(path);
+
+ ASSERT_EQ(p, "/this/is/a/path");
+ }
+
+ TEST(canonPath, removesDots2) {
+ auto path = "/this/a/../is/a////path/foo/..";
+ auto p = canonPath(path);
+
+ ASSERT_EQ(p, "/this/is/a/path");
+ }
+
+ TEST(canonPath, requiresAbsolutePath) {
+ ASSERT_ANY_THROW(canonPath("."));
+ ASSERT_ANY_THROW(canonPath(".."));
+ ASSERT_ANY_THROW(canonPath("../"));
+ ASSERT_DEATH({ canonPath(""); }, "path != \"\"");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * dirOf
+ * --------------------------------------------------------------------------*/
+
+ TEST(dirOf, returnsEmptyStringForRoot) {
+ auto p = dirOf("/");
+
+ ASSERT_EQ(p, "/");
+ }
+
+ TEST(dirOf, returnsFirstPathComponent) {
+ auto p1 = dirOf("/dir/");
+ ASSERT_EQ(p1, "/dir");
+ auto p2 = dirOf("/dir");
+ ASSERT_EQ(p2, "/");
+ auto p3 = dirOf("/dir/..");
+ ASSERT_EQ(p3, "/dir");
+ auto p4 = dirOf("/dir/../");
+ ASSERT_EQ(p4, "/dir/..");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * baseNameOf
+ * --------------------------------------------------------------------------*/
+
+ TEST(baseNameOf, emptyPath) {
+ auto p1 = baseNameOf("");
+ ASSERT_EQ(p1, "");
+ }
+
+ TEST(baseNameOf, pathOnRoot) {
+ auto p1 = baseNameOf("/dir");
+ ASSERT_EQ(p1, "dir");
+ }
+
+ TEST(baseNameOf, relativePath) {
+ auto p1 = baseNameOf("dir/foo");
+ ASSERT_EQ(p1, "foo");
+ }
+
+ TEST(baseNameOf, pathWithTrailingSlashRoot) {
+ auto p1 = baseNameOf("/");
+ ASSERT_EQ(p1, "");
+ }
+
+ TEST(baseNameOf, trailingSlash) {
+ auto p1 = baseNameOf("/dir/");
+ ASSERT_EQ(p1, "dir");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * isInDir
+ * --------------------------------------------------------------------------*/
+
+ TEST(isInDir, trivialCase) {
+ auto p1 = isInDir("/foo/bar", "/foo");
+ ASSERT_EQ(p1, true);
+ }
+
+ TEST(isInDir, notInDir) {
+ auto p1 = isInDir("/zes/foo/bar", "/foo");
+ ASSERT_EQ(p1, false);
+ }
+
+ // XXX: hm, bug or feature? :) Looking at the implementation
+ // this might be problematic.
+ TEST(isInDir, emptyDir) {
+ auto p1 = isInDir("/zes/foo/bar", "");
+ ASSERT_EQ(p1, true);
+ }
+
+ /* ----------------------------------------------------------------------------
+ * isDirOrInDir
+ * --------------------------------------------------------------------------*/
+
+ TEST(isDirOrInDir, trueForSameDirectory) {
+ ASSERT_EQ(isDirOrInDir("/nix", "/nix"), true);
+ ASSERT_EQ(isDirOrInDir("/", "/"), true);
+ }
+
+ TEST(isDirOrInDir, trueForEmptyPaths) {
+ ASSERT_EQ(isDirOrInDir("", ""), true);
+ }
+
+ TEST(isDirOrInDir, falseForDisjunctPaths) {
+ ASSERT_EQ(isDirOrInDir("/foo", "/bar"), false);
+ }
+
+ TEST(isDirOrInDir, relativePaths) {
+ ASSERT_EQ(isDirOrInDir("/foo/..", "/foo"), true);
+ }
+
+ // XXX: while it is possible to use "." or ".." in the
+ // first argument this doesn't seem to work in the second.
+ TEST(isDirOrInDir, DISABLED_shouldWork) {
+ ASSERT_EQ(isDirOrInDir("/foo/..", "/foo/."), true);
+
+ }
+
+ /* ----------------------------------------------------------------------------
+ * pathExists
+ * --------------------------------------------------------------------------*/
+
+ TEST(pathExists, rootExists) {
+ ASSERT_TRUE(pathExists("/"));
+ }
+
+ TEST(pathExists, cwdExists) {
+ ASSERT_TRUE(pathExists("."));
+ }
+
+ TEST(pathExists, bogusPathDoesNotExist) {
+ ASSERT_FALSE(pathExists("/home/schnitzel/darmstadt/pommes"));
+ }
+
+ /* ----------------------------------------------------------------------------
+ * concatStringsSep
+ * --------------------------------------------------------------------------*/
+
+ TEST(concatStringsSep, buildCommaSeparatedString) {
+ Strings strings;
+ strings.push_back("this");
+ strings.push_back("is");
+ strings.push_back("great");
+
+ ASSERT_EQ(concatStringsSep(",", strings), "this,is,great");
+ }
+
+ TEST(concatStringsSep, buildStringWithEmptySeparator) {
+ Strings strings;
+ strings.push_back("this");
+ strings.push_back("is");
+ strings.push_back("great");
+
+ ASSERT_EQ(concatStringsSep("", strings), "thisisgreat");
+ }
+
+ TEST(concatStringsSep, buildSingleString) {
+ Strings strings;
+ strings.push_back("this");
+
+ ASSERT_EQ(concatStringsSep(",", strings), "this");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * hasPrefix
+ * --------------------------------------------------------------------------*/
+
+ TEST(hasPrefix, emptyStringHasNoPrefix) {
+ ASSERT_FALSE(hasPrefix("", "foo"));
+ }
+
+ TEST(hasPrefix, emptyStringIsAlwaysPrefix) {
+ ASSERT_TRUE(hasPrefix("foo", ""));
+ ASSERT_TRUE(hasPrefix("jshjkfhsadf", ""));
+ }
+
+ TEST(hasPrefix, trivialCase) {
+ ASSERT_TRUE(hasPrefix("foobar", "foo"));
+ }
+
+ /* ----------------------------------------------------------------------------
+ * hasSuffix
+ * --------------------------------------------------------------------------*/
+
+ TEST(hasSuffix, emptyStringHasNoSuffix) {
+ ASSERT_FALSE(hasSuffix("", "foo"));
+ }
+
+ TEST(hasSuffix, trivialCase) {
+ ASSERT_TRUE(hasSuffix("foo", "foo"));
+ ASSERT_TRUE(hasSuffix("foobar", "bar"));
+ }
+
+ /* ----------------------------------------------------------------------------
+ * base64Encode
+ * --------------------------------------------------------------------------*/
+
+ TEST(base64Encode, emptyString) {
+ ASSERT_EQ(base64Encode(""), "");
+ }
+
+ TEST(base64Encode, encodesAString) {
+ ASSERT_EQ(base64Encode("quod erat demonstrandum"), "cXVvZCBlcmF0IGRlbW9uc3RyYW5kdW0=");
+ }
+
+ TEST(base64Encode, encodeAndDecode) {
+ auto s = "quod erat demonstrandum";
+ auto encoded = base64Encode(s);
+ auto decoded = base64Decode(encoded);
+
+ ASSERT_EQ(decoded, s);
+ }
+
+ /* ----------------------------------------------------------------------------
+ * base64Decode
+ * --------------------------------------------------------------------------*/
+
+ TEST(base64Decode, emptyString) {
+ ASSERT_EQ(base64Decode(""), "");
+ }
+
+ TEST(base64Decode, decodeAString) {
+ ASSERT_EQ(base64Decode("cXVvZCBlcmF0IGRlbW9uc3RyYW5kdW0="), "quod erat demonstrandum");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * toLower
+ * --------------------------------------------------------------------------*/
+
+ TEST(toLower, emptyString) {
+ ASSERT_EQ(toLower(""), "");
+ }
+
+ TEST(toLower, nonLetters) {
+ auto s = "!@(*$#)(@#=\\234_";
+ ASSERT_EQ(toLower(s), s);
+ }
+
+    // std::tolower() doesn't handle unicode characters. In the context of
+    // store paths this isn't relevant, but it doesn't hurt to record this
+    // behavior here.
+ TEST(toLower, umlauts) {
+ auto s = "ÄÖÜ";
+ ASSERT_EQ(toLower(s), "ÄÖÜ");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * string2Float
+ * --------------------------------------------------------------------------*/
+
+ TEST(string2Float, emptyString) {
+ double n;
+ ASSERT_EQ(string2Float("", n), false);
+ }
+
+ TEST(string2Float, trivialConversions) {
+ double n;
+ ASSERT_EQ(string2Float("1.0", n), true);
+ ASSERT_EQ(n, 1.0);
+
+ ASSERT_EQ(string2Float("0.0", n), true);
+ ASSERT_EQ(n, 0.0);
+
+ ASSERT_EQ(string2Float("-100.25", n), true);
+ ASSERT_EQ(n, (-100.25));
+ }
+
+ /* ----------------------------------------------------------------------------
+ * string2Int
+ * --------------------------------------------------------------------------*/
+
+ TEST(string2Int, emptyString) {
+ double n;
+ ASSERT_EQ(string2Int("", n), false);
+ }
+
+ TEST(string2Int, trivialConversions) {
+ double n;
+ ASSERT_EQ(string2Int("1", n), true);
+ ASSERT_EQ(n, 1);
+
+ ASSERT_EQ(string2Int("0", n), true);
+ ASSERT_EQ(n, 0);
+
+ ASSERT_EQ(string2Int("-100", n), true);
+ ASSERT_EQ(n, (-100));
+ }
+
+ /* ----------------------------------------------------------------------------
+ * statusOk
+ * --------------------------------------------------------------------------*/
+
+ TEST(statusOk, zeroIsOk) {
+ ASSERT_EQ(statusOk(0), true);
+ ASSERT_EQ(statusOk(1), false);
+ }
+
+
+ /* ----------------------------------------------------------------------------
+ * rewriteStrings
+ * --------------------------------------------------------------------------*/
+
+ TEST(rewriteStrings, emptyString) {
+ StringMap rewrites;
+ rewrites["this"] = "that";
+
+ ASSERT_EQ(rewriteStrings("", rewrites), "");
+ }
+
+ TEST(rewriteStrings, emptyRewrites) {
+ StringMap rewrites;
+
+ ASSERT_EQ(rewriteStrings("this and that", rewrites), "this and that");
+ }
+
+ TEST(rewriteStrings, successfulRewrite) {
+ StringMap rewrites;
+ rewrites["this"] = "that";
+
+ ASSERT_EQ(rewriteStrings("this and that", rewrites), "that and that");
+ }
+
+ TEST(rewriteStrings, doesntOccur) {
+ StringMap rewrites;
+ rewrites["foo"] = "bar";
+
+ ASSERT_EQ(rewriteStrings("this and that", rewrites), "this and that");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * replaceStrings
+ * --------------------------------------------------------------------------*/
+
+ TEST(replaceStrings, emptyString) {
+ ASSERT_EQ(replaceStrings("", "this", "that"), "");
+ ASSERT_EQ(replaceStrings("this and that", "", ""), "this and that");
+ }
+
+ TEST(replaceStrings, successfulReplace) {
+ ASSERT_EQ(replaceStrings("this and that", "this", "that"), "that and that");
+ }
+
+ TEST(replaceStrings, doesntOccur) {
+ ASSERT_EQ(replaceStrings("this and that", "foo", "bar"), "this and that");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * trim
+ * --------------------------------------------------------------------------*/
+
+ TEST(trim, emptyString) {
+ ASSERT_EQ(trim(""), "");
+ }
+
+ TEST(trim, removesWhitespace) {
+ ASSERT_EQ(trim("foo"), "foo");
+ ASSERT_EQ(trim(" foo "), "foo");
+ ASSERT_EQ(trim(" foo bar baz"), "foo bar baz");
+ ASSERT_EQ(trim(" \t foo bar baz\n"), "foo bar baz");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * chomp
+ * --------------------------------------------------------------------------*/
+
+ TEST(chomp, emptyString) {
+ ASSERT_EQ(chomp(""), "");
+ }
+
+ TEST(chomp, removesWhitespace) {
+ ASSERT_EQ(chomp("foo"), "foo");
+ ASSERT_EQ(chomp("foo "), "foo");
+ ASSERT_EQ(chomp(" foo "), " foo");
+ ASSERT_EQ(chomp(" foo bar baz "), " foo bar baz");
+ ASSERT_EQ(chomp("\t foo bar baz\n"), "\t foo bar baz");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * quoteStrings
+ * --------------------------------------------------------------------------*/
+
+ TEST(quoteStrings, empty) {
+ Strings s = { };
+ Strings expected = { };
+
+ ASSERT_EQ(quoteStrings(s), expected);
+ }
+
+ TEST(quoteStrings, emptyStrings) {
+ Strings s = { "", "", "" };
+ Strings expected = { "''", "''", "''" };
+ ASSERT_EQ(quoteStrings(s), expected);
+
+ }
+
+ TEST(quoteStrings, trivialQuote) {
+ Strings s = { "foo", "bar", "baz" };
+ Strings expected = { "'foo'", "'bar'", "'baz'" };
+
+ ASSERT_EQ(quoteStrings(s), expected);
+ }
+
+ TEST(quoteStrings, quotedStrings) {
+ Strings s = { "'foo'", "'bar'", "'baz'" };
+ Strings expected = { "''foo''", "''bar''", "''baz''" };
+
+ ASSERT_EQ(quoteStrings(s), expected);
+ }
+
+ /* ----------------------------------------------------------------------------
+ * tokenizeString
+ * --------------------------------------------------------------------------*/
+
+ TEST(tokenizeString, empty) {
+ Strings expected = { };
+
+ ASSERT_EQ(tokenizeString<Strings>(""), expected);
+ }
+
+ TEST(tokenizeString, tokenizeSpacesWithDefaults) {
+ auto s = "foo bar baz";
+ Strings expected = { "foo", "bar", "baz" };
+
+ ASSERT_EQ(tokenizeString<Strings>(s), expected);
+ }
+
+ TEST(tokenizeString, tokenizeTabsWithDefaults) {
+ auto s = "foo\tbar\tbaz";
+ Strings expected = { "foo", "bar", "baz" };
+
+ ASSERT_EQ(tokenizeString<Strings>(s), expected);
+ }
+
+ TEST(tokenizeString, tokenizeTabsSpacesWithDefaults) {
+ auto s = "foo\t bar\t baz";
+ Strings expected = { "foo", "bar", "baz" };
+
+ ASSERT_EQ(tokenizeString<Strings>(s), expected);
+ }
+
+ TEST(tokenizeString, tokenizeTabsSpacesNewlineWithDefaults) {
+ auto s = "foo\t\n bar\t\n baz";
+ Strings expected = { "foo", "bar", "baz" };
+
+ ASSERT_EQ(tokenizeString<Strings>(s), expected);
+ }
+
+ TEST(tokenizeString, tokenizeTabsSpacesNewlineRetWithDefaults) {
+ auto s = "foo\t\n\r bar\t\n\r baz";
+ Strings expected = { "foo", "bar", "baz" };
+
+ ASSERT_EQ(tokenizeString<Strings>(s), expected);
+
+ auto s2 = "foo \t\n\r bar \t\n\r baz";
+ Strings expected2 = { "foo", "bar", "baz" };
+
+ ASSERT_EQ(tokenizeString<Strings>(s2), expected2);
+ }
+
+ TEST(tokenizeString, tokenizeWithCustomSep) {
+ auto s = "foo\n,bar\n,baz\n";
+ Strings expected = { "foo\n", "bar\n", "baz\n" };
+
+ ASSERT_EQ(tokenizeString<Strings>(s, ","), expected);
+ }
+
+ /* ----------------------------------------------------------------------------
+ * get
+ * --------------------------------------------------------------------------*/
+
+ TEST(get, emptyContainer) {
+ StringMap s = { };
+ auto expected = std::nullopt;
+
+ ASSERT_EQ(get(s, "one"), expected);
+ }
+
+ TEST(get, getFromContainer) {
+ StringMap s;
+ s["one"] = "yi";
+ s["two"] = "er";
+ auto expected = "yi";
+
+ ASSERT_EQ(get(s, "one"), expected);
+ }
+
+ /* ----------------------------------------------------------------------------
+ * filterANSIEscapes
+ * --------------------------------------------------------------------------*/
+
+ TEST(filterANSIEscapes, emptyString) {
+ auto s = "";
+ auto expected = "";
+
+ ASSERT_EQ(filterANSIEscapes(s), expected);
+ }
+
+ TEST(filterANSIEscapes, doesntChangePrintableChars) {
+ auto s = "09 2q304ruyhr slk2-19024 kjsadh sar f";
+
+ ASSERT_EQ(filterANSIEscapes(s), s);
+ }
+
+ TEST(filterANSIEscapes, filtersColorCodes) {
+ auto s = "\u001b[30m A \u001b[31m B \u001b[32m C \u001b[33m D \u001b[0m";
+
+ ASSERT_EQ(filterANSIEscapes(s, true, 2), " A" );
+ ASSERT_EQ(filterANSIEscapes(s, true, 3), " A " );
+ ASSERT_EQ(filterANSIEscapes(s, true, 4), " A " );
+ ASSERT_EQ(filterANSIEscapes(s, true, 5), " A B" );
+ ASSERT_EQ(filterANSIEscapes(s, true, 8), " A B C" );
+ }
+
+ TEST(filterANSIEscapes, expandsTabs) {
+ auto s = "foo\tbar\tbaz";
+
+ ASSERT_EQ(filterANSIEscapes(s, true), "foo bar baz" );
+ }
+}
diff --git a/src/libutil/tests/url.cc b/src/libutil/tests/url.cc
new file mode 100644
index 000000000..80646ad3e
--- /dev/null
+++ b/src/libutil/tests/url.cc
@@ -0,0 +1,266 @@
+#include "url.hh"
+#include <gtest/gtest.h>
+
+namespace nix {
+
+/* ----------- tests for url.hh --------------------------------------------------*/
+
+ string print_map(std::map<string, string> m) {
+ std::map<string, string>::iterator it;
+ string s = "{ ";
+ for (it = m.begin(); it != m.end(); ++it) {
+ s += "{ ";
+ s += it->first;
+ s += " = ";
+ s += it->second;
+ s += " } ";
+ }
+ s += "}";
+ return s;
+ }
+
+
+ std::ostream& operator<<(std::ostream& os, const ParsedURL& p) {
+ return os << "\n"
+ << "url: " << p.url << "\n"
+ << "base: " << p.base << "\n"
+ << "scheme: " << p.scheme << "\n"
+ << "authority: " << p.authority.value() << "\n"
+ << "path: " << p.path << "\n"
+ << "query: " << print_map(p.query) << "\n"
+ << "fragment: " << p.fragment << "\n";
+ }
+
+ TEST(parseURL, parsesSimpleHttpUrl) {
+ auto s = "http://www.example.org/file.tar.gz";
+ auto parsed = parseURL(s);
+
+ ParsedURL expected {
+ .url = "http://www.example.org/file.tar.gz",
+ .base = "http://www.example.org/file.tar.gz",
+ .scheme = "http",
+ .authority = "www.example.org",
+ .path = "/file.tar.gz",
+ .query = (StringMap) { },
+ .fragment = "",
+ };
+
+ ASSERT_EQ(parsed, expected);
+ }
+
+ TEST(parseURL, parsesSimpleHttpsUrl) {
+ auto s = "https://www.example.org/file.tar.gz";
+ auto parsed = parseURL(s);
+
+ ParsedURL expected {
+ .url = "https://www.example.org/file.tar.gz",
+ .base = "https://www.example.org/file.tar.gz",
+ .scheme = "https",
+ .authority = "www.example.org",
+ .path = "/file.tar.gz",
+ .query = (StringMap) { },
+ .fragment = "",
+ };
+
+ ASSERT_EQ(parsed, expected);
+ }
+
+ TEST(parseURL, parsesSimpleHttpUrlWithQueryAndFragment) {
+ auto s = "https://www.example.org/file.tar.gz?download=fast&when=now#hello";
+ auto parsed = parseURL(s);
+
+ ParsedURL expected {
+ .url = "https://www.example.org/file.tar.gz",
+ .base = "https://www.example.org/file.tar.gz",
+ .scheme = "https",
+ .authority = "www.example.org",
+ .path = "/file.tar.gz",
+ .query = (StringMap) { { "download", "fast" }, { "when", "now" } },
+ .fragment = "hello",
+ };
+
+ ASSERT_EQ(parsed, expected);
+ }
+
+ TEST(parseURL, parsesSimpleHttpUrlWithComplexFragment) {
+ auto s = "http://www.example.org/file.tar.gz?field=value#?foo=bar%23";
+ auto parsed = parseURL(s);
+
+ ParsedURL expected {
+ .url = "http://www.example.org/file.tar.gz",
+ .base = "http://www.example.org/file.tar.gz",
+ .scheme = "http",
+ .authority = "www.example.org",
+ .path = "/file.tar.gz",
+ .query = (StringMap) { { "field", "value" } },
+ .fragment = "?foo=bar#",
+ };
+
+ ASSERT_EQ(parsed, expected);
+ }
+
+
+ TEST(parseURL, parseIPv4Address) {
+ auto s = "http://127.0.0.1:8080/file.tar.gz?download=fast&when=now#hello";
+ auto parsed = parseURL(s);
+
+ ParsedURL expected {
+ .url = "http://127.0.0.1:8080/file.tar.gz",
+        .base = "http://127.0.0.1:8080/file.tar.gz",
+ .scheme = "http",
+ .authority = "127.0.0.1:8080",
+ .path = "/file.tar.gz",
+ .query = (StringMap) { { "download", "fast" }, { "when", "now" } },
+ .fragment = "hello",
+ };
+
+ ASSERT_EQ(parsed, expected);
+ }
+
+ TEST(parseURL, parseIPv6Address) {
+ auto s = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080";
+ auto parsed = parseURL(s);
+
+ ParsedURL expected {
+ .url = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080",
+ .base = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080",
+ .scheme = "http",
+ .authority = "[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080",
+ .path = "",
+ .query = (StringMap) { },
+ .fragment = "",
+ };
+
+ ASSERT_EQ(parsed, expected);
+
+ }
+
+ TEST(parseURL, parseEmptyQueryParams) {
+ auto s = "http://127.0.0.1:8080/file.tar.gz?&&&&&";
+ auto parsed = parseURL(s);
+ ASSERT_EQ(parsed.query, (StringMap) { });
+ }
+
+ TEST(parseURL, parseUserPassword) {
+ auto s = "http://user:pass@www.example.org:8080/file.tar.gz";
+ auto parsed = parseURL(s);
+
+ ParsedURL expected {
+ .url = "http://user:pass@www.example.org/file.tar.gz",
+ .base = "http://user:pass@www.example.org/file.tar.gz",
+ .scheme = "http",
+ .authority = "user:pass@www.example.org:8080",
+ .path = "/file.tar.gz",
+ .query = (StringMap) { },
+ .fragment = "",
+ };
+
+
+ ASSERT_EQ(parsed, expected);
+ }
+
+ TEST(parseURL, parseFileURLWithQueryAndFragment) {
+ auto s = "file:///none/of/your/business";
+ auto parsed = parseURL(s);
+
+ ParsedURL expected {
+ .url = "",
+ .base = "",
+ .scheme = "file",
+ .authority = "",
+ .path = "/none/of/your/business",
+ .query = (StringMap) { },
+ .fragment = "",
+ };
+
+ ASSERT_EQ(parsed, expected);
+
+ }
+
+ TEST(parseURL, parsedUrlsIsEqualToItself) {
+ auto s = "http://www.example.org/file.tar.gz";
+ auto url = parseURL(s);
+
+ ASSERT_TRUE(url == url);
+ }
+
+ TEST(parseURL, parseFTPUrl) {
+ auto s = "ftp://ftp.nixos.org/downloads/nixos.iso";
+ auto parsed = parseURL(s);
+
+ ParsedURL expected {
+ .url = "ftp://ftp.nixos.org/downloads/nixos.iso",
+ .base = "ftp://ftp.nixos.org/downloads/nixos.iso",
+ .scheme = "ftp",
+ .authority = "ftp.nixos.org",
+ .path = "/downloads/nixos.iso",
+ .query = (StringMap) { },
+ .fragment = "",
+ };
+
+ ASSERT_EQ(parsed, expected);
+ }
+
+ TEST(parseURL, parsesAnythingInUriFormat) {
+ auto s = "whatever://github.com/NixOS/nixpkgs.git";
+ auto parsed = parseURL(s);
+ }
+
+ TEST(parseURL, parsesAnythingInUriFormatWithoutDoubleSlash) {
+ auto s = "whatever:github.com/NixOS/nixpkgs.git";
+ auto parsed = parseURL(s);
+ }
+
+ TEST(parseURL, emptyStringIsInvalidURL) {
+ ASSERT_THROW(parseURL(""), Error);
+ }
+
+ /* ----------------------------------------------------------------------------
+ * decodeQuery
+ * --------------------------------------------------------------------------*/
+
+ TEST(decodeQuery, emptyStringYieldsEmptyMap) {
+ auto d = decodeQuery("");
+ ASSERT_EQ(d, (StringMap) { });
+ }
+
+ TEST(decodeQuery, simpleDecode) {
+ auto d = decodeQuery("yi=one&er=two");
+ ASSERT_EQ(d, ((StringMap) { { "yi", "one" }, { "er", "two" } }));
+ }
+
+ TEST(decodeQuery, decodeUrlEncodedArgs) {
+ auto d = decodeQuery("arg=%3D%3D%40%3D%3D");
+ ASSERT_EQ(d, ((StringMap) { { "arg", "==@==" } }));
+ }
+
+ TEST(decodeQuery, decodeArgWithEmptyValue) {
+ auto d = decodeQuery("arg=");
+ ASSERT_EQ(d, ((StringMap) { { "arg", ""} }));
+ }
+
+ /* ----------------------------------------------------------------------------
+ * percentDecode
+ * --------------------------------------------------------------------------*/
+
+ TEST(percentDecode, decodesUrlEncodedString) {
+ string s = "==@==";
+ string d = percentDecode("%3D%3D%40%3D%3D");
+ ASSERT_EQ(d, s);
+ }
+
+ TEST(percentDecode, multipleDecodesAreIdempotent) {
+ string once = percentDecode("%3D%3D%40%3D%3D");
+ string twice = percentDecode(once);
+
+ ASSERT_EQ(once, twice);
+ }
+
+ TEST(percentDecode, trailingPercent) {
+ string s = "==@==%";
+ string d = percentDecode("%3D%3D%40%3D%3D%25");
+
+ ASSERT_EQ(d, s);
+ }
+
+}
diff --git a/src/libutil/types.hh b/src/libutil/types.hh
index 20b96a85c..250c9581d 100644
--- a/src/libutil/types.hh
+++ b/src/libutil/types.hh
@@ -41,7 +41,8 @@ struct FormatOrString
{
string s;
FormatOrString(const string & s) : s(s) { };
- FormatOrString(const format & f) : s(f.str()) { };
+ template<class F>
+ FormatOrString(const F & f) : s(f.str()) { };
FormatOrString(const char * s) : s(s) { };
};
@@ -51,12 +52,13 @@ struct FormatOrString
... a_n’. However, ‘fmt(s)’ is equivalent to ‘s’ (so no %-expansion
takes place). */
-inline void formatHelper(boost::format & f)
+template<class F>
+inline void formatHelper(F & f)
{
}
-template<typename T, typename... Args>
-inline void formatHelper(boost::format & f, const T & x, const Args & ... args)
+template<class F, typename T, typename... Args>
+inline void formatHelper(F & f, const T & x, const Args & ... args)
{
formatHelper(f % x, args...);
}
@@ -157,4 +159,12 @@ typedef list<Path> Paths;
typedef set<Path> PathSet;
+/* Helper class to run code at startup. */
+template<typename T>
+struct OnStartup
+{
+ OnStartup(T && t) { t(); }
+};
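+
+/* Usage sketch (illustrative): run registration code during static
+   initialisation, e.g.
+     static OnStartup registerThing([] { doRegistration(); });
+   where 'registerThing' and 'doRegistration' are hypothetical names. */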
+
+
}
diff --git a/src/libutil/url.cc b/src/libutil/url.cc
new file mode 100644
index 000000000..5d5328e5d
--- /dev/null
+++ b/src/libutil/url.cc
@@ -0,0 +1,137 @@
+#include "url.hh"
+#include "util.hh"
+
+namespace nix {
+
+std::regex refRegex(refRegexS, std::regex::ECMAScript);
+std::regex revRegex(revRegexS, std::regex::ECMAScript);
+std::regex flakeIdRegex(flakeIdRegexS, std::regex::ECMAScript);
+
+ParsedURL parseURL(const std::string & url)
+{
+ static std::regex uriRegex(
+ "((" + schemeRegex + "):"
+ + "(?:(?://(" + authorityRegex + ")(" + absPathRegex + "))|(/?" + pathRegex + ")))"
+ + "(?:\\?(" + queryRegex + "))?"
+ + "(?:#(" + queryRegex + "))?",
+ std::regex::ECMAScript);
+
+ std::smatch match;
+
+ if (std::regex_match(url, match, uriRegex)) {
+ auto & base = match[1];
+ std::string scheme = match[2];
+ auto authority = match[3].matched
+ ? std::optional<std::string>(match[3]) : std::nullopt;
+ std::string path = match[4].matched ? match[4] : match[5];
+ auto & query = match[6];
+ auto & fragment = match[7];
+
+ auto isFile = scheme.find("file") != std::string::npos;
+
+ if (authority && *authority != "" && isFile)
+ throw Error("file:// URL '%s' has unexpected authority '%s'",
+ url, *authority);
+
+ if (isFile && path.empty())
+ path = "/";
+
+ return ParsedURL{
+ .url = url,
+ .base = base,
+ .scheme = scheme,
+ .authority = authority,
+ .path = path,
+ .query = decodeQuery(query),
+ .fragment = percentDecode(std::string(fragment))
+ };
+ }
+
+ else
+ throw BadURL("'%s' is not a valid URL", url);
+}
+
+std::string percentDecode(std::string_view in)
+{
+ std::string decoded;
+ for (size_t i = 0; i < in.size(); ) {
+ if (in[i] == '%') {
+ if (i + 2 >= in.size())
+ throw BadURL("invalid URI parameter '%s'", in);
+ try {
+ decoded += std::stoul(std::string(in, i + 1, 2), 0, 16);
+ i += 3;
+ } catch (...) {
+ throw BadURL("invalid URI parameter '%s'", in);
+ }
+ } else
+ decoded += in[i++];
+ }
+ return decoded;
+}
+
+std::map<std::string, std::string> decodeQuery(const std::string & query)
+{
+ std::map<std::string, std::string> result;
+
+ for (auto s : tokenizeString<Strings>(query, "&")) {
+ auto e = s.find('=');
+ if (e != std::string::npos)
+ result.emplace(
+ s.substr(0, e),
+ percentDecode(std::string_view(s).substr(e + 1)));
+ }
+
+ return result;
+}
+
+std::string percentEncode(std::string_view s)
+{
+ std::string res;
+ for (auto & c : s)
+ if ((c >= 'a' && c <= 'z')
+ || (c >= 'A' && c <= 'Z')
+ || (c >= '0' && c <= '9')
+ || strchr("-._~!$&'()*+,;=:@", c))
+ res += c;
+ else
+ res += fmt("%%%02x", (unsigned int) c);
+ return res;
+}
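+
+/* Usage sketch (illustrative): characters outside the unreserved/sub-delims
+   set are escaped as lowercase hex, e.g. percentEncode("a b") == "a%20b",
+   while '=' and '@' pass through unchanged. */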
+
+std::string encodeQuery(const std::map<std::string, std::string> & ss)
+{
+ std::string res;
+ bool first = true;
+ for (auto & [name, value] : ss) {
+ if (!first) res += '&';
+ first = false;
+ res += percentEncode(name);
+ res += '=';
+ res += percentEncode(value);
+ }
+ return res;
+}
+
+std::string ParsedURL::to_string() const
+{
+ return
+ scheme
+ + ":"
+ + (authority ? "//" + *authority : "")
+ + path
+ + (query.empty() ? "" : "?" + encodeQuery(query))
+ + (fragment.empty() ? "" : "#" + percentEncode(fragment));
+}
+
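+// Note: the 'url' and 'base' fields are not part of the comparison below.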
+bool ParsedURL::operator ==(const ParsedURL & other) const
+{
+ return
+ scheme == other.scheme
+ && authority == other.authority
+ && path == other.path
+ && query == other.query
+ && fragment == other.fragment;
+}
+
+}
diff --git a/src/libutil/url.hh b/src/libutil/url.hh
new file mode 100644
index 000000000..1503023a2
--- /dev/null
+++ b/src/libutil/url.hh
@@ -0,0 +1,62 @@
+#pragma once
+
+#include "types.hh"
+
+#include <regex>
+
+namespace nix {
+
+struct ParsedURL
+{
+ std::string url;
+ std::string base; // URL without query/fragment
+ std::string scheme;
+ std::optional<std::string> authority;
+ std::string path;
+ std::map<std::string, std::string> query;
+ std::string fragment;
+
+ std::string to_string() const;
+
+ bool operator ==(const ParsedURL & other) const;
+};
+
+MakeError(BadURL, Error);
+
+std::string percentDecode(std::string_view in);
+
+std::map<std::string, std::string> decodeQuery(const std::string & query);
+
+ParsedURL parseURL(const std::string & url);
+
+// URI stuff.
+const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])";
+const static std::string schemeRegex = "(?:[a-z+]+)";
+const static std::string ipv6AddressRegex = "(?:\\[[0-9a-fA-F:]+\\])";
+const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])";
+const static std::string subdelimsRegex = "(?:[!$&'\"()*+,;=])";
+const static std::string hostnameRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + ")*)";
+const static std::string hostRegex = "(?:" + ipv6AddressRegex + "|" + hostnameRegex + ")";
+const static std::string userRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|:)*)";
+const static std::string authorityRegex = "(?:" + userRegex + "@)?" + hostRegex + "(?::[0-9]+)?";
+const static std::string pcharRegex = "(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|[:@])";
+const static std::string queryRegex = "(?:" + pcharRegex + "|[/? \"])*";
+const static std::string segmentRegex = "(?:" + pcharRegex + "+)";
+const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)";
+const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)";
+
+// A Git ref (i.e. branch or tag name).
+const static std::string refRegexS = "[a-zA-Z0-9][a-zA-Z0-9_.-]*"; // FIXME: check
+extern std::regex refRegex;
+
+// A Git revision (a SHA-1 commit hash).
+const static std::string revRegexS = "[0-9a-fA-F]{40}";
+extern std::regex revRegex;
+
+// A ref or revision, or a ref followed by a revision.
+const static std::string refAndOrRevRegex = "(?:(" + revRegexS + ")|(?:(" + refRegexS + ")(?:/(" + revRegexS + "))?))";
+
+const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*";
+extern std::regex flakeIdRegex;
+
+}
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index 097ff210a..71db92d77 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -268,16 +268,13 @@ bool isLink(const Path & path)
}
-DirEntries readDirectory(const Path & path)
+DirEntries readDirectory(DIR *dir, const Path & path)
{
DirEntries entries;
entries.reserve(64);
- AutoCloseDir dir(opendir(path.c_str()));
- if (!dir) throw SysError(format("opening directory '%1%'") % path);
-
struct dirent * dirent;
- while (errno = 0, dirent = readdir(dir.get())) { /* sic */
+ while (errno = 0, dirent = readdir(dir)) { /* sic */
checkInterrupt();
string name = dirent->d_name;
if (name == "." || name == "..") continue;
@@ -294,6 +291,14 @@ DirEntries readDirectory(const Path & path)
return entries;
}
+DirEntries readDirectory(const Path & path)
+{
+ AutoCloseDir dir(opendir(path.c_str()));
+ if (!dir) throw SysError(format("opening directory '%1%'") % path);
+
+ return readDirectory(dir.get(), path);
+}
+
unsigned char getFileType(const Path & path)
{
@@ -311,19 +316,16 @@ string readFile(int fd)
if (fstat(fd, &st) == -1)
throw SysError("statting file");
- std::vector<unsigned char> buf(st.st_size);
- readFull(fd, buf.data(), st.st_size);
-
- return string((char *) buf.data(), st.st_size);
+ return drainFD(fd, true, st.st_size);
}
-string readFile(const Path & path, bool drain)
+string readFile(const Path & path)
{
AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC);
if (!fd)
throw SysError(format("opening file '%1%'") % path);
- return drain ? drainFD(fd.get()) : readFile(fd.get());
+ return readFile(fd.get());
}
@@ -389,12 +391,14 @@ void writeLine(int fd, string s)
}
-static void _deletePath(const Path & path, unsigned long long & bytesFreed)
+static void _deletePath(int parentfd, const Path & path, unsigned long long & bytesFreed)
{
checkInterrupt();
+ string name(baseNameOf(path));
+
struct stat st;
- if (lstat(path.c_str(), &st) == -1) {
+ if (fstatat(parentfd, name.c_str(), &st, AT_SYMLINK_NOFOLLOW) == -1) {
if (errno == ENOENT) return;
throw SysError(format("getting status of '%1%'") % path);
}
@@ -406,20 +410,45 @@ static void _deletePath(const Path & path, unsigned long long & bytesFreed)
/* Make the directory accessible. */
const auto PERM_MASK = S_IRUSR | S_IWUSR | S_IXUSR;
if ((st.st_mode & PERM_MASK) != PERM_MASK) {
- if (chmod(path.c_str(), st.st_mode | PERM_MASK) == -1)
+ if (fchmodat(parentfd, name.c_str(), st.st_mode | PERM_MASK, 0) == -1)
throw SysError(format("chmod '%1%'") % path);
}
- for (auto & i : readDirectory(path))
- _deletePath(path + "/" + i.name, bytesFreed);
+ int fd = openat(parentfd, path.c_str(), O_RDONLY);
+    if (fd == -1)
+ throw SysError(format("opening directory '%1%'") % path);
+ AutoCloseDir dir(fdopendir(fd));
+ if (!dir)
+ throw SysError(format("opening directory '%1%'") % path);
+ for (auto & i : readDirectory(dir.get(), path))
+ _deletePath(dirfd(dir.get()), path + "/" + i.name, bytesFreed);
}
- if (remove(path.c_str()) == -1) {
+ int flags = S_ISDIR(st.st_mode) ? AT_REMOVEDIR : 0;
+ if (unlinkat(parentfd, name.c_str(), flags) == -1) {
if (errno == ENOENT) return;
throw SysError(format("cannot unlink '%1%'") % path);
}
}
+static void _deletePath(const Path & path, unsigned long long & bytesFreed)
+{
+ Path dir = dirOf(path);
+ if (dir == "")
+ dir = "/";
+
+ AutoCloseFD dirfd(open(dir.c_str(), O_RDONLY));
+ if (!dirfd) {
+ // This really shouldn't fail silently, but it's left this way
+ // for backwards compatibility.
+ if (errno == ENOENT) return;
+
+        throw SysError(format("opening directory '%1%'") % dir);
+ }
+
+ _deletePath(dirfd.get(), path, bytesFreed);
+}
+
void deletePath(const Path & path)
{
@@ -478,6 +507,17 @@ Path createTempDir(const Path & tmpRoot, const Path & prefix,
}
+std::pair<AutoCloseFD, Path> createTempFile(const Path & prefix)
+{
+ Path tmpl(getEnv("TMPDIR").value_or("/tmp") + "/" + prefix + ".XXXXXX");
+ // Strictly speaking, this is UB, but who cares...
+ AutoCloseFD fd(mkstemp((char *) tmpl.c_str()));
+ if (!fd)
+ throw SysError("creating temporary file '%s'", tmpl);
+ return {std::move(fd), tmpl};
+}
+
+
std::string getUserName()
{
auto pw = getpwuid(geteuid());
@@ -622,9 +662,9 @@ void writeFull(int fd, const string & s, bool allowInterrupts)
}
-string drainFD(int fd, bool block)
+string drainFD(int fd, bool block, const size_t reserveSize)
{
- StringSink sink;
+ StringSink sink(reserveSize);
drainFD(fd, sink, block);
return std::move(*sink.s);
}
diff --git a/src/libutil/util.hh b/src/libutil/util.hh
index 7c3a30242..a63ee05b3 100644
--- a/src/libutil/util.hh
+++ b/src/libutil/util.hh
@@ -2,6 +2,7 @@
#include "types.hh"
#include "logging.hh"
+#include "ansicolor.hh"
#include <sys/types.h>
#include <sys/stat.h>
@@ -16,6 +17,7 @@
#include <sstream>
#include <optional>
#include <future>
+#include <iterator>
#ifndef HAVE_STRUCT_DIRENT_D_TYPE
#define DT_UNKNOWN 0
@@ -56,12 +58,12 @@ Path canonPath(const Path & path, bool resolveSymlinks = false);
/* Return the directory part of the given canonical path, i.e.,
everything before the final `/'. If the path is the root or an
- immediate child thereof (e.g., `/foo'), this means an empty string
- is returned. */
+ immediate child thereof (e.g., `/foo'), this means `/'
+   is returned. */
Path dirOf(const Path & path);
/* Return the base name of the given canonical path, i.e., everything
- following the final `/'. */
+ following the final `/' (trailing slashes are removed). */
std::string_view baseNameOf(std::string_view path);
/* Check whether 'path' is a descendant of 'dir'. */
@@ -101,7 +103,7 @@ unsigned char getFileType(const Path & path);
/* Read the contents of a file into a string. */
string readFile(int fd);
-string readFile(const Path & path, bool drain = false);
+string readFile(const Path & path);
void readFile(const Path & path, Sink & sink);
/* Write a string to a file. */
@@ -122,10 +124,6 @@ void deletePath(const Path & path);
void deletePath(const Path & path, unsigned long long & bytesFreed);
-/* Create a temporary directory. */
-Path createTempDir(const Path & tmpRoot = "", const Path & prefix = "nix",
- bool includePid = true, bool useGlobalCounter = true, mode_t mode = 0755);
-
std::string getUserName();
/* Return $HOME or the user's home directory from /etc/passwd. */
@@ -164,7 +162,7 @@ MakeError(EndOfFile, Error);
/* Read a file descriptor until EOF occurs. */
-string drainFD(int fd, bool block = true);
+string drainFD(int fd, bool block = true, const size_t reserveSize=0);
void drainFD(int fd, Sink & sink, bool block = true);
@@ -205,6 +203,14 @@ public:
};
+/* Create a temporary directory. */
+Path createTempDir(const Path & tmpRoot = "", const Path & prefix = "nix",
+ bool includePid = true, bool useGlobalCounter = true, mode_t mode = 0755);
+
+/* Create a temporary file, returning a file handle and its path. */
+std::pair<AutoCloseFD, Path> createTempFile(const Path & prefix = "nix");
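+
+/* Usage sketch (illustrative):
+     auto [fd, tmpPath] = createTempFile("nix-example");
+     writeFull(fd.get(), contents);   // 'contents' is a hypothetical string
+*/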
+
+
class Pipe
{
public:
@@ -383,17 +389,6 @@ string replaceStrings(const std::string & s,
std::string rewriteStrings(const std::string & s, const StringMap & rewrites);
-/* If a set contains 'from', remove it and insert 'to'. */
-template<typename T>
-void replaceInSet(std::set<T> & set, const T & from, const T & to)
-{
- auto i = set.find(from);
- if (i == set.end()) return;
- set.erase(i);
- set.insert(to);
-}
-
-
/* Convert the exit status of a child as returned by wait() into an
error string. */
string statusToString(int status);
@@ -441,15 +436,6 @@ std::string shellEscape(const std::string & s);
void ignoreException();
-/* Some ANSI escape sequences. */
-#define ANSI_NORMAL "\e[0m"
-#define ANSI_BOLD "\e[1m"
-#define ANSI_FAINT "\e[2m"
-#define ANSI_RED "\e[31;1m"
-#define ANSI_GREEN "\e[32;1m"
-#define ANSI_YELLOW "\e[33;1m"
-#define ANSI_BLUE "\e[34;1m"
-
/* Tree formatting. */
constexpr char treeConn[] = "├───";
diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc
index a2639579d..abd390414 100755
--- a/src/nix-channel/nix-channel.cc
+++ b/src/nix-channel/nix-channel.cc
@@ -1,8 +1,9 @@
#include "shared.hh"
#include "globals.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "store-api.hh"
#include "../nix/legacy.hh"
+#include "fetchers.hh"
#include <fcntl.h>
#include <regex>
@@ -86,12 +87,9 @@ static void update(const StringSet & channelNames)
// We want to download the url to a file to see if it's a tarball while also checking if we
// got redirected in the process, so that we can grab the various parts of a nix channel
// definition from a consistent location if the redirect changes mid-download.
- CachedDownloadRequest request(url);
- request.ttl = 0;
- auto dl = getDownloader();
- auto result = dl->downloadCached(store, request);
- auto filename = result.path;
- url = chomp(result.effectiveUri);
+ auto result = fetchers::downloadFile(store, url, std::string(baseNameOf(url)), false);
+ auto filename = store->toRealPath(result.storePath);
+ url = result.effectiveUrl;
// If the URL contains a version number, append it to the name
// attribute (so that "nix-env -q" on the channels profile
@@ -114,11 +112,10 @@ static void update(const StringSet & channelNames)
if (!unpacked) {
// Download the channel tarball.
try {
- filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.xz")).path;
- } catch (DownloadError & e) {
- filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.bz2")).path;
+ filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.xz", "nixexprs.tar.xz", false).storePath);
+ } catch (FileTransferError & e) {
+ filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2", false).storePath);
}
- chomp(filename);
}
// Regardless of where it came from, add the expression representing this channel to accumulated expression
@@ -185,6 +182,8 @@ static int _main(int argc, char ** argv)
} else if (*arg == "--rollback") {
cmd = cRollback;
} else {
+ if (hasPrefix(*arg, "-"))
+ throw UsageError("unsupported argument '%s'", *arg);
args.push_back(std::move(*arg));
}
return true;
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index 1a2bb42a3..d62febaff 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -718,28 +718,39 @@ static void uninstallDerivations(Globals & globals, Strings & selectors,
while (true) {
string lockToken = optimisticLockProfile(profile);
- DrvInfos installedElems = queryInstalled(*globals.state, profile);
- DrvInfos newElems;
-
- for (auto & i : installedElems) {
- DrvName drvName(i.queryName());
- bool found = false;
- for (auto & j : selectors)
- /* !!! the repeated calls to followLinksToStorePath()
- are expensive, should pre-compute them. */
- if ((isPath(j) && globals.state->store->parseStorePath(i.queryOutPath()) == globals.state->store->followLinksToStorePath(j))
- || DrvName(j).matches(drvName))
- {
- printInfo("uninstalling '%s'", i.queryName());
- found = true;
- break;
- }
- if (!found) newElems.push_back(i);
+ DrvInfos workingElems = queryInstalled(*globals.state, profile);
+
+ for (auto & selector : selectors) {
+ DrvInfos::iterator split = workingElems.begin();
+ if (isPath(selector)) {
+ StorePath selectorStorePath = globals.state->store->followLinksToStorePath(selector);
+ split = std::partition(
+ workingElems.begin(), workingElems.end(),
+ [&selectorStorePath, globals](auto &elem) {
+ return selectorStorePath != globals.state->store->parseStorePath(elem.queryOutPath());
+ }
+ );
+ } else {
+ DrvName selectorName(selector);
+ split = std::partition(
+ workingElems.begin(), workingElems.end(),
+ [&selectorName](auto &elem){
+ DrvName elemName(elem.queryName());
+ return !selectorName.matches(elemName);
+ }
+ );
+ }
+ if (split == workingElems.end())
+ warn("selector '%s' matched no installed derivations", selector);
+ for (auto removedElem = split; removedElem != workingElems.end(); removedElem++) {
+ printInfo("uninstalling '%s'", removedElem->queryName());
+ }
+ workingElems.erase(split, workingElems.end());
}
if (globals.dryRun) return;
- if (createUserEnv(*globals.state, newElems,
+ if (createUserEnv(*globals.state, workingElems,
profile, settings.envKeepDerivations, lockToken)) break;
}
}

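The uninstall rewrite above hinges on std::partition: it reorders the range so that elements for which the predicate is true come first and returns an iterator to the first element of the second group, which here holds the derivations to remove. A minimal standalone sketch of that pattern (hypothetical names and data, not part of this patch):

    // Partition a list so non-matching elements stay; matching ones are logged and erased.
    #include <algorithm>
    #include <iostream>
    #include <list>
    #include <string>

    int main()
    {
        std::list<std::string> elems{"hello-2.10", "git-2.25", "hello-2.9"};
        std::string selector = "hello";

        // Elements that do NOT match the selector are kept at the front.
        auto split = std::partition(elems.begin(), elems.end(),
            [&](const std::string & e) { return e.find(selector) == std::string::npos; });

        if (split == elems.end())
            std::cerr << "selector '" << selector << "' matched nothing\n";
        for (auto i = split; i != elems.end(); ++i)
            std::cerr << "uninstalling '" << *i << "'\n";
        elems.erase(split, elems.end());
    }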
diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc
index 717431b7a..f852916d8 100644
--- a/src/nix-env/user-env.cc
+++ b/src/nix-env/user-env.cc
@@ -15,6 +15,8 @@ namespace nix {
DrvInfos queryInstalled(EvalState & state, const Path & userEnv)
{
DrvInfos elems;
+ if (pathExists(userEnv + "/manifest.json"))
+ throw Error("profile '%s' is incompatible with 'nix-env'; please use 'nix profile' instead", userEnv);
Path manifestFile = userEnv + "/manifest.nix";
if (pathExists(manifestFile)) {
Value v;
diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc
index af510bc08..5a686c8cd 100644
--- a/src/nix-prefetch-url/nix-prefetch-url.cc
+++ b/src/nix-prefetch-url/nix-prefetch-url.cc
@@ -1,6 +1,6 @@
#include "hash.hh"
#include "shared.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "store-api.hh"
#include "eval.hh"
#include "eval-inline.hh"
@@ -181,9 +181,9 @@ static int _main(int argc, char * * argv)
FdSink sink(fd.get());
- DownloadRequest req(actualUri);
+ FileTransferRequest req(actualUri);
req.decompress = false;
- getDownloader()->download(std::move(req), sink);
+ getFileTransfer()->download(std::move(req), sink);
}
/* Optionally unpack the file. */
diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc
index e82ab844e..4b4ba81cb 100644
--- a/src/nix/add-to-store.cc
+++ b/src/nix/add-to-store.cc
@@ -14,12 +14,13 @@ struct CmdAddToStore : MixDryRun, StoreCommand
{
expectArg("path", &path);
- mkFlag()
- .longName("name")
- .shortName('n')
- .description("name component of the store path")
- .labels({"name"})
- .dest(&namePart);
+ addFlag({
+ .longName = "name",
+ .shortName = 'n',
+ .description = "name component of the store path",
+ .labels = {"name"},
+ .handler = {&namePart},
+ });
}
std::string description() override
@@ -33,6 +34,8 @@ struct CmdAddToStore : MixDryRun, StoreCommand
};
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store) override
{
if (!namePart) namePart = baseNameOf(path);
@@ -50,7 +53,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
if (!dryRun)
store->addToStore(info, sink.s);
- std::cout << fmt("%s\n", store->printStorePath(info.path));
+ logger->stdout("%s", store->printStorePath(info.path));
}
};
diff --git a/src/nix/build.cc b/src/nix/build.cc
index 3c9d2df39..850e09ce8 100644
--- a/src/nix/build.cc
+++ b/src/nix/build.cc
@@ -5,23 +5,25 @@
using namespace nix;
-struct CmdBuild : MixDryRun, InstallablesCommand
+struct CmdBuild : InstallablesCommand, MixDryRun, MixProfile
{
Path outLink = "result";
CmdBuild()
{
- mkFlag()
- .longName("out-link")
- .shortName('o')
- .description("path of the symlink to the build result")
- .labels({"path"})
- .dest(&outLink);
+ addFlag({
+ .longName = "out-link",
+ .shortName = 'o',
+ .description = "path of the symlink to the build result",
+ .labels = {"path"},
+ .handler = {&outLink},
+ });
- mkFlag()
- .longName("no-link")
- .description("do not create a symlink to the build result")
- .set(&outLink, Path(""));
+ addFlag({
+ .longName = "no-link",
+ .description = "do not create a symlink to the build result",
+ .handler = {&outLink, Path("")},
+ });
}
std::string description() override
@@ -40,6 +42,10 @@ struct CmdBuild : MixDryRun, InstallablesCommand
"To build the build.x86_64-linux attribute from release.nix:",
"nix build -f release.nix build.x86_64-linux"
},
+ Example{
+ "To make a profile point at GNU Hello:",
+ "nix build --profile /tmp/profile nixpkgs.hello"
+ },
};
}
@@ -49,18 +55,19 @@ struct CmdBuild : MixDryRun, InstallablesCommand
if (dryRun) return;
- for (size_t i = 0; i < buildables.size(); ++i) {
- auto & b(buildables[i]);
-
- if (outLink != "")
- for (auto & output : b.outputs)
+ if (outLink != "") {
+ for (size_t i = 0; i < buildables.size(); ++i) {
+ for (auto & output : buildables[i].outputs)
if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>()) {
std::string symlink = outLink;
if (i) symlink += fmt("-%d", i);
if (output.first != "out") symlink += fmt("-%s", output.first);
store2->addPermRoot(output.second, absPath(symlink), true);
}
+ }
}
+
+ updateProfile(buildables);
}
};
diff --git a/src/nix/cat.cc b/src/nix/cat.cc
index 851f90abd..fd91f2036 100644
--- a/src/nix/cat.cc
+++ b/src/nix/cat.cc
@@ -30,9 +30,11 @@ struct CmdCatStore : StoreCommand, MixCat
std::string description() override
{
- return "print the contents of a store file on stdout";
+ return "print the contents of a file in the Nix store on stdout";
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store) override
{
cat(store->getFSAccessor());
@@ -51,9 +53,11 @@ struct CmdCatNar : StoreCommand, MixCat
std::string description() override
{
- return "print the contents of a file inside a NAR file";
+ return "print the contents of a file inside a NAR file on stdout";
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store) override
{
cat(makeNarAccessor(make_ref<std::string>(readFile(narPath))));
diff --git a/src/nix/command.cc b/src/nix/command.cc
index fce6c391c..ea0ade88e 100644
--- a/src/nix/command.cc
+++ b/src/nix/command.cc
@@ -2,6 +2,9 @@
#include "store-api.hh"
#include "derivations.hh"
#include "nixexpr.hh"
+#include "profiles.hh"
+
+extern char * * environ;
namespace nix {
@@ -32,16 +35,18 @@ StorePathsCommand::StorePathsCommand(FileIngestionMethod recursive)
: recursive(recursive)
{
if (recursive == FileIngestionMethod::Recursive)
- mkFlag()
- .longName("no-recursive")
- .description("apply operation to specified paths only")
- .set(&this->recursive, FileIngestionMethod::Flat);
+ addFlag({
+ .longName = "no-recursive",
+ .description = "apply operation to specified paths only",
+ .handler = {&this->recursive, FileIngestionMethod::Flat},
+ });
else
- mkFlag()
- .longName("recursive")
- .shortName('r')
- .description("apply operation to closure of the specified paths")
- .set(&this->recursive, FileIngestionMethod::Recursive);
+ addFlag({
+ .longName = "recursive",
+ .shortName = 'r',
+ .description = "apply operation to closure of the specified paths",
+ .handler = {&this->recursive, FileIngestionMethod::Recursive},
+ });
mkFlag(0, "all", "apply operation to the entire store", &all);
}
@@ -96,4 +101,98 @@ Strings editorFor(const Pos & pos)
return args;
}
+MixProfile::MixProfile()
+{
+ addFlag({
+ .longName = "profile",
+ .description = "profile to update",
+ .labels = {"path"},
+ .handler = {&profile},
+ });
+}
+
+void MixProfile::updateProfile(const StorePath & storePath)
+{
+ if (!profile) return;
+ auto store = getStore().dynamic_pointer_cast<LocalFSStore>();
+ if (!store) throw Error("'--profile' is not supported for this Nix store");
+ auto profile2 = absPath(*profile);
+ switchLink(profile2,
+ createGeneration(
+ ref<LocalFSStore>(store),
+ profile2, store->printStorePath(storePath)));
+}
+
+void MixProfile::updateProfile(const Buildables & buildables)
+{
+ if (!profile) return;
+
+ std::optional<StorePath> result;
+
+ for (auto & buildable : buildables) {
+ for (auto & output : buildable.outputs) {
+ if (result)
+ throw Error("'--profile' requires that the arguments produce a single store path, but there are multiple");
+ result = output.second.clone();
+ }
+ }
+
+ if (!result)
+ throw Error("'--profile' requires that the arguments produce a single store path, but there are none");
+
+ updateProfile(*result);
+}
+
+MixDefaultProfile::MixDefaultProfile()
+{
+ profile = getDefaultProfile();
+}
+
+MixEnvironment::MixEnvironment() : ignoreEnvironment(false)
+{
+ addFlag({
+ .longName = "ignore-environment",
+ .shortName = 'i',
+ .description = "clear the entire environment (except those specified with --keep)",
+ .handler = {&ignoreEnvironment, true},
+ });
+
+ addFlag({
+ .longName = "keep",
+ .shortName = 'k',
+ .description = "keep specified environment variable",
+ .labels = {"name"},
+ .handler = {[&](std::string s) { keep.insert(s); }},
+ });
+
+ addFlag({
+ .longName = "unset",
+ .shortName = 'u',
+ .description = "unset specified environment variable",
+ .labels = {"name"},
+ .handler = {[&](std::string s) { unset.insert(s); }},
+ });
+}
+
+void MixEnvironment::setEnviron() {
+ if (ignoreEnvironment) {
+ if (!unset.empty())
+ throw UsageError("--unset does not make sense with --ignore-environment");
+
+ for (const auto & var : keep) {
+ auto val = getenv(var.c_str());
+ if (val) stringsEnv.emplace_back(fmt("%s=%s", var.c_str(), val));
+ }
+
+ vectorEnv = stringsToCharPtrs(stringsEnv);
+ environ = vectorEnv.data();
+ } else {
+ if (!keep.empty())
+ throw UsageError("--keep does not make sense without --ignore-environment");
+
+ for (const auto & var : unset)
+ unsetenv(var.c_str());
+ }
+}
+
}
diff --git a/src/nix/command.hh b/src/nix/command.hh
index 4cdda9f79..09c621b5b 100644
--- a/src/nix/command.hh
+++ b/src/nix/command.hh
@@ -1,5 +1,6 @@
#pragma once
+#include "installables.hh"
#include "args.hh"
#include "common-eval-args.hh"
#include "path.hh"
@@ -9,6 +10,10 @@ namespace nix {
extern std::string programPath;
+static constexpr Command::Category catSecondary = 100;
+static constexpr Command::Category catUtility = 101;
+static constexpr Command::Category catNixInstallation = 102;
+
/* A command that requires a Nix store. */
struct StoreCommand : virtual Command
{
@@ -22,34 +27,7 @@ private:
std::shared_ptr<Store> _store;
};
-struct Buildable
-{
- std::optional<StorePath> drvPath;
- std::map<std::string, StorePath> outputs;
-};
-
-typedef std::vector<Buildable> Buildables;
-
-struct Installable
-{
- virtual ~Installable() { }
-
- virtual std::string what() = 0;
-
- virtual Buildables toBuildables()
- {
- throw Error("argument '%s' cannot be built", what());
- }
-
- Buildable toBuildable();
-
- virtual std::pair<Value *, Pos> toValue(EvalState & state)
- {
- throw Error("argument '%s' cannot be evaluated", what());
- }
-};
-
-struct SourceExprCommand : virtual Args, StoreCommand, MixEvalArgs
+struct SourceExprCommand : virtual StoreCommand, MixEvalArgs
{
Path file;
@@ -67,7 +45,7 @@ private:
std::shared_ptr<EvalState> evalState;
- Value * vSourceExpr = 0;
+ RootValue vSourceExpr;
};
enum RealiseMode { Build, NoBuild, DryRun };
@@ -184,4 +162,36 @@ std::set<StorePath> toDerivations(ref<Store> store,
filename:lineno. */
Strings editorFor(const Pos & pos);
+struct MixProfile : virtual StoreCommand
+{
+ std::optional<Path> profile;
+
+ MixProfile();
+
+ /* If 'profile' is set, make it point at 'storePath'. */
+ void updateProfile(const StorePath & storePath);
+
+ /* If 'profile' is set, make it point at the store path produced
+ by 'buildables'. */
+ void updateProfile(const Buildables & buildables);
+};
+
+struct MixDefaultProfile : MixProfile
+{
+ MixDefaultProfile();
+};
+
+struct MixEnvironment : virtual Args {
+
+ StringSet keep, unset;
+ Strings stringsEnv;
+ std::vector<char*> vectorEnv;
+ bool ignoreEnvironment;
+
+ MixEnvironment();
+
+ /* Modify global environ based on ignoreEnvironment, keep, and unset. It's expected that exec will be called before this class goes out of scope, otherwise environ will become invalid. */
+ void setEnviron();
+};
+
}
diff --git a/src/nix/copy.cc b/src/nix/copy.cc
index a8ee3fce3..60aa3f14b 100644
--- a/src/nix/copy.cc
+++ b/src/nix/copy.cc
@@ -19,27 +19,32 @@ struct CmdCopy : StorePathsCommand
CmdCopy()
: StorePathsCommand(FileIngestionMethod::Recursive)
{
- mkFlag()
- .longName("from")
- .labels({"store-uri"})
- .description("URI of the source Nix store")
- .dest(&srcUri);
- mkFlag()
- .longName("to")
- .labels({"store-uri"})
- .description("URI of the destination Nix store")
- .dest(&dstUri);
-
- mkFlag()
- .longName("no-check-sigs")
- .description("do not require that paths are signed by trusted keys")
- .set(&checkSigs, NoCheckSigs);
-
- mkFlag()
- .longName("substitute-on-destination")
- .shortName('s')
- .description("whether to try substitutes on the destination store (only supported by SSH)")
- .set(&substitute, Substitute);
+ addFlag({
+ .longName = "from",
+ .description = "URI of the source Nix store",
+ .labels = {"store-uri"},
+ .handler = {&srcUri},
+ });
+
+ addFlag({
+ .longName = "to",
+ .description = "URI of the destination Nix store",
+ .labels = {"store-uri"},
+ .handler = {&dstUri},
+ });
+
+ addFlag({
+ .longName = "no-check-sigs",
+ .description = "do not require that paths are signed by trusted keys",
+ .handler = {&checkSigs, NoCheckSigs},
+ });
+
+ addFlag({
+ .longName = "substitute-on-destination",
+ .shortName = 's',
+ .description = "whether to try substitutes on the destination store (only supported by SSH)",
+ .handler = {&substitute, Substitute},
+ });
}
std::string description() override
@@ -75,6 +80,8 @@ struct CmdCopy : StorePathsCommand
};
}
+ Category category() override { return catSecondary; }
+
ref<Store> createStore() override
{
return srcUri.empty() ? StoreCommand::createStore() : openStore(srcUri);
diff --git a/src/nix/dev-shell.cc b/src/nix/dev-shell.cc
new file mode 100644
index 000000000..d300f6a23
--- /dev/null
+++ b/src/nix/dev-shell.cc
@@ -0,0 +1,344 @@
+#include "eval.hh"
+#include "command.hh"
+#include "common-args.hh"
+#include "shared.hh"
+#include "store-api.hh"
+#include "derivations.hh"
+#include "affinity.hh"
+#include "progress-bar.hh"
+
+#include <regex>
+
+using namespace nix;
+
+struct Var
+{
+ bool exported = true;
+ bool associative = false;
+ std::string value; // quoted string or array
+};
+
+struct BuildEnvironment
+{
+ std::map<std::string, Var> env;
+ std::string bashFunctions;
+};
+
+BuildEnvironment readEnvironment(const Path & path)
+{
+ BuildEnvironment res;
+
+ std::set<std::string> exported;
+
+ debug("reading environment file '%s'", path);
+
+ auto file = readFile(path);
+
+ auto pos = file.cbegin();
+
+ static std::string varNameRegex =
+ R"re((?:[a-zA-Z_][a-zA-Z0-9_]*))re";
+
+ static std::regex declareRegex(
+ "^declare -x (" + varNameRegex + ")" +
+ R"re((?:="((?:[^"\\]|\\.)*)")?\n)re");
+
+ static std::string simpleStringRegex =
+ R"re((?:[a-zA-Z0-9_/:\.\-\+=]*))re";
+
+ static std::string quotedStringRegex =
+ R"re((?:\$?'(?:[^'\\]|\\[abeEfnrtv\\'"?])*'))re";
+
+ static std::string indexedArrayRegex =
+ R"re((?:\(( *\[[0-9]+]="(?:[^"\\]|\\.)*")**\)))re";
+
+ static std::regex varRegex(
+ "^(" + varNameRegex + ")=(" + simpleStringRegex + "|" + quotedStringRegex + "|" + indexedArrayRegex + ")\n");
+
+ /* Note: we distinguish between an indexed and associative array
+ using the space before the closing parenthesis. Will
+ undoubtedly regret this some day. */
+ static std::regex assocArrayRegex(
+ "^(" + varNameRegex + ")=" + R"re((?:\(( *\[[^\]]+\]="(?:[^"\\]|\\.)*")* *\)))re" + "\n");
+
+ static std::regex functionRegex(
+ "^" + varNameRegex + " \\(\\) *\n");
+
+ while (pos != file.end()) {
+
+ std::smatch match;
+
+ if (std::regex_search(pos, file.cend(), match, declareRegex)) {
+ pos = match[0].second;
+ exported.insert(match[1]);
+ }
+
+ else if (std::regex_search(pos, file.cend(), match, varRegex)) {
+ pos = match[0].second;
+ res.env.insert({match[1], Var { .exported = exported.count(match[1]) > 0, .value = match[2] }});
+ }
+
+ else if (std::regex_search(pos, file.cend(), match, assocArrayRegex)) {
+ pos = match[0].second;
+ res.env.insert({match[1], Var { .associative = true, .value = match[2] }});
+ }
+
+ else if (std::regex_search(pos, file.cend(), match, functionRegex)) {
+ res.bashFunctions = std::string(pos, file.cend());
+ break;
+ }
+
+ else throw Error("shell environment '%s' has unexpected line '%s'",
+ path, file.substr(pos - file.cbegin(), 60));
+ }
+
+ return res;
+}
+
+const static std::string getEnvSh =
+ #include "get-env.sh.gen.hh"
+ ;
+
+/* Given an existing derivation, return the shell environment as
+ initialised by stdenv's setup script. We do this by building a
+ modified derivation with the same dependencies and nearly the same
+ initial environment variables, that just writes the resulting
+ environment to a file and exits. */
+StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
+{
+ auto drv = store->derivationFromPath(drvPath);
+
+ auto builder = baseNameOf(drv.builder);
+ if (builder != "bash")
+ throw Error("'nix dev-shell' only works on derivations that use 'bash' as their builder");
+
+ auto getEnvShPath = store->addTextToStore("get-env.sh", getEnvSh, {});
+
+ drv.args = {store->printStorePath(getEnvShPath)};
+
+ /* Remove derivation checks. */
+ drv.env.erase("allowedReferences");
+ drv.env.erase("allowedRequisites");
+ drv.env.erase("disallowedReferences");
+ drv.env.erase("disallowedRequisites");
+
+ /* Rehash and write the derivation. FIXME: would be nice to use
+ 'buildDerivation', but that's privileged. */
+ auto drvName = std::string(drvPath.name());
+ assert(hasSuffix(drvName, ".drv"));
+ drvName.resize(drvName.size() - 4);
+ drvName += "-env";
+ for (auto & output : drv.outputs)
+ drv.env.erase(output.first);
+ drv.env["out"] = "";
+ drv.env["outputs"] = "out";
+ drv.inputSrcs.insert(std::move(getEnvShPath));
+ Hash h = hashDerivationModulo(*store, drv, true);
+ auto shellOutPath = store->makeOutputPath("out", h, drvName);
+ drv.outputs.insert_or_assign("out", DerivationOutput(shellOutPath.clone(), "", ""));
+ drv.env["out"] = store->printStorePath(shellOutPath);
+ auto shellDrvPath2 = writeDerivation(store, drv, drvName);
+
+ /* Build the derivation. */
+ store->buildPaths({shellDrvPath2});
+
+ assert(store->isValidPath(shellOutPath));
+
+ return shellOutPath;
+}
+
+struct Common : InstallableCommand, MixProfile
+{
+ std::set<string> ignoreVars{
+ "BASHOPTS",
+ "EUID",
+ "HOME", // FIXME: don't ignore in pure mode?
+ "NIX_BUILD_TOP",
+ "NIX_ENFORCE_PURITY",
+ "NIX_LOG_FD",
+ "PPID",
+ "PWD",
+ "SHELLOPTS",
+ "SHLVL",
+ "SSL_CERT_FILE", // FIXME: only want to ignore /no-cert-file.crt
+ "TEMP",
+ "TEMPDIR",
+ "TERM",
+ "TMP",
+ "TMPDIR",
+ "TZ",
+ "UID",
+ };
+
+ void makeRcScript(const BuildEnvironment & buildEnvironment, std::ostream & out)
+ {
+ out << "unset shellHook\n";
+
+ out << "nix_saved_PATH=\"$PATH\"\n";
+
+ for (auto & i : buildEnvironment.env) {
+ if (!ignoreVars.count(i.first) && !hasPrefix(i.first, "BASH_")) {
+ if (i.second.associative)
+ out << fmt("declare -A %s=(%s)\n", i.first, i.second.value);
+ else {
+ out << fmt("%s=%s\n", i.first, i.second.value);
+ if (i.second.exported)
+ out << fmt("export %s\n", i.first);
+ }
+ }
+ }
+
+ out << "PATH=\"$PATH:$nix_saved_PATH\"\n";
+
+ out << buildEnvironment.bashFunctions << "\n";
+
+ // FIXME: set outputs
+
+ out << "export NIX_BUILD_TOP=\"$(mktemp -d --tmpdir nix-shell.XXXXXX)\"\n";
+ for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"})
+ out << fmt("export %s=\"$NIX_BUILD_TOP\"\n", i);
+
+ out << "eval \"$shellHook\"\n";
+ }
+
+ StorePath getShellOutPath(ref<Store> store)
+ {
+ auto path = installable->getStorePath();
+ if (path && hasSuffix(path->to_string(), "-env"))
+ return path->clone();
+ else {
+ auto drvs = toDerivations(store, {installable});
+
+ if (drvs.size() != 1)
+ throw Error("'%s' needs to evaluate to a single derivation, but it evaluated to %d derivations",
+ installable->what(), drvs.size());
+
+ auto & drvPath = *drvs.begin();
+
+ return getDerivationEnvironment(store, drvPath);
+ }
+ }
+
+ std::pair<BuildEnvironment, std::string> getBuildEnvironment(ref<Store> store)
+ {
+ auto shellOutPath = getShellOutPath(store);
+
+ auto strPath = store->printStorePath(shellOutPath);
+
+ updateProfile(shellOutPath);
+
+ return {readEnvironment(strPath), strPath};
+ }
+};
+
+struct CmdDevShell : Common, MixEnvironment
+{
+ std::vector<std::string> command;
+
+ CmdDevShell()
+ {
+ addFlag({
+ .longName = "command",
+ .shortName = 'c',
+ .description = "command and arguments to be executed insted of an interactive shell",
+ .labels = {"command", "args"},
+ .handler = {[&](std::vector<std::string> ss) {
+ if (ss.empty()) throw UsageError("--command requires at least one argument");
+ command = ss;
+ }}
+ });
+ }
+
+ std::string description() override
+ {
+ return "run a bash shell that provides the build environment of a derivation";
+ }
+
+ Examples examples() override
+ {
+ return {
+ Example{
+ "To get the build environment of GNU hello:",
+ "nix dev-shell nixpkgs.hello"
+ },
+ Example{
+ "To store the build environment in a profile:",
+ "nix dev-shell --profile /tmp/my-shell nixpkgs.hello"
+ },
+ Example{
+ "To use a build environment previously recorded in a profile:",
+ "nix dev-shell /tmp/my-shell"
+ },
+ };
+ }
+
+ void run(ref<Store> store) override
+ {
+ auto [buildEnvironment, gcroot] = getBuildEnvironment(store);
+
+ auto [rcFileFd, rcFilePath] = createTempFile("nix-shell");
+
+ std::ostringstream ss;
+ makeRcScript(buildEnvironment, ss);
+
+ ss << fmt("rm -f '%s'\n", rcFilePath);
+
+ if (!command.empty()) {
+ std::vector<std::string> args;
+ for (auto s : command)
+ args.push_back(shellEscape(s));
+ ss << fmt("exec %s\n", concatStringsSep(" ", args));
+ }
+
+ writeFull(rcFileFd.get(), ss.str());
+
+ stopProgressBar();
+
+ auto shell = getEnv("SHELL").value_or("bash");
+
+ setEnviron();
+ // prevent garbage collection until shell exits
+ setenv("NIX_GCROOT", gcroot.data(), 1);
+
+ auto args = Strings{std::string(baseNameOf(shell)), "--rcfile", rcFilePath};
+
+ restoreAffinity();
+ restoreSignals();
+
+ execvp(shell.c_str(), stringsToCharPtrs(args).data());
+
+ throw SysError("executing shell '%s'", shell);
+ }
+};
+
+struct CmdPrintDevEnv : Common
+{
+ std::string description() override
+ {
+ return "print shell code that can be sourced by bash to reproduce the build environment of a derivation";
+ }
+
+ Examples examples() override
+ {
+ return {
+ Example{
+ "To apply the build environment of GNU hello to the current shell:",
+ ". <(nix print-dev-env nixpkgs.hello)"
+ },
+ };
+ }
+
+ Category category() override { return catUtility; }
+
+ void run(ref<Store> store) override
+ {
+ auto buildEnvironment = getBuildEnvironment(store).first;
+
+ stopProgressBar();
+
+ makeRcScript(buildEnvironment, std::cout);
+ }
+};
+
+static auto r1 = registerCommand<CmdPrintDevEnv>("print-dev-env");
+static auto r2 = registerCommand<CmdDevShell>("dev-shell");
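For reference, readEnvironment() above drives a handful of std::regex patterns over the dumped environment file. A minimal standalone sketch of the declare-line case (hypothetical input, not part of this patch):

    // Match one `declare -x NAME="VALUE"` line as produced by bash's `export`.
    #include <iostream>
    #include <regex>
    #include <string>

    int main()
    {
        static const std::string varNameRegex = R"re((?:[a-zA-Z_][a-zA-Z0-9_]*))re";
        static const std::regex declareRegex(
            "^declare -x (" + varNameRegex + ")" + R"re((?:="((?:[^"\\]|\\.)*)")?\n)re");

        std::string line = "declare -x PATH=\"/usr/bin:/bin\"\n";
        std::smatch match;
        if (std::regex_search(line, match, declareRegex))
            std::cout << match[1] << " = " << match[2] << "\n";  // prints: PATH = /usr/bin:/bin
    }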
diff --git a/src/nix/doctor.cc b/src/nix/doctor.cc
index 0aa634d6e..82e92cdd0 100644
--- a/src/nix/doctor.cc
+++ b/src/nix/doctor.cc
@@ -40,9 +40,11 @@ struct CmdDoctor : StoreCommand
std::string description() override
{
- return "check your system for potential problems and print a PASS or FAIL for each check.";
+ return "check your system for potential problems and print a PASS or FAIL for each check";
}
+ Category category() override { return catNixInstallation; }
+
void run(ref<Store> store) override
{
logger->log("Running checks against store uri: " + store->getUri());
diff --git a/src/nix/dump-path.cc b/src/nix/dump-path.cc
index bb741b572..e1de71bf8 100644
--- a/src/nix/dump-path.cc
+++ b/src/nix/dump-path.cc
@@ -20,6 +20,8 @@ struct CmdDumpPath : StorePathCommand
};
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store, const StorePath & storePath) override
{
FdSink sink(STDOUT_FILENO);
diff --git a/src/nix/edit.cc b/src/nix/edit.cc
index 1683eada0..067d3a973 100644
--- a/src/nix/edit.cc
+++ b/src/nix/edit.cc
@@ -25,6 +25,8 @@ struct CmdEdit : InstallableCommand
};
}
+ Category category() override { return catSecondary; }
+
void run(ref<Store> store) override
{
auto state = getEvalState();
diff --git a/src/nix/eval.cc b/src/nix/eval.cc
index 6398fc58e..26e98ac2a 100644
--- a/src/nix/eval.cc
+++ b/src/nix/eval.cc
@@ -45,6 +45,8 @@ struct CmdEval : MixJSON, InstallableCommand
};
}
+ Category category() override { return catSecondary; }
+
void run(ref<Store> store) override
{
if (raw && json)
@@ -55,16 +57,15 @@ struct CmdEval : MixJSON, InstallableCommand
auto v = installable->toValue(*state).first;
PathSet context;
- stopProgressBar();
-
if (raw) {
+ stopProgressBar();
std::cout << state->coerceToString(noPos, *v, context);
} else if (json) {
JSONPlaceholder jsonOut(std::cout);
printValueAsJSON(*state, true, *v, jsonOut, context);
} else {
state->forceValueDeep(*v);
- std::cout << *v << "\n";
+ logger->stdout("%s", *v);
}
}
};
diff --git a/src/nix/get-env.sh b/src/nix/get-env.sh
new file mode 100644
index 000000000..a25ec43a9
--- /dev/null
+++ b/src/nix/get-env.sh
@@ -0,0 +1,9 @@
+set -e
+if [ -e .attrs.sh ]; then source .attrs.sh; fi
+export IN_NIX_SHELL=impure
+export dontAddDisableDepTrack=1
+if [[ -n $stdenv ]]; then
+ source $stdenv/setup
+fi
+export > $out
+set >> $out
diff --git a/src/nix/hash.cc b/src/nix/hash.cc
index 0cc523f50..366314227 100644
--- a/src/nix/hash.cc
+++ b/src/nix/hash.cc
@@ -23,9 +23,7 @@ struct CmdHash : Command
mkFlag(0, "base64", "print hash in base-64", &base, Base64);
mkFlag(0, "base32", "print hash in base-32 (Nix-specific)", &base, Base32);
mkFlag(0, "base16", "print hash in base-16", &base, Base16);
- mkFlag()
- .longName("type")
- .mkHashTypeFlag(&ht);
+ addFlag(Flag::mkHashTypeFlag("type", &ht));
#if 0
mkFlag()
.longName("modulo")
@@ -43,6 +41,8 @@ struct CmdHash : Command
: "print cryptographic hash of the NAR serialisation of a path";
}
+ Category category() override { return catUtility; }
+
void run() override
{
for (auto path : paths) {
@@ -60,8 +60,7 @@ struct CmdHash : Command
Hash h = hashSink->finish().first;
if (truncate && h.hashSize > 20) h = compressHash(h, 20);
- std::cout << format("%1%\n") %
- h.to_string(base, base == SRI);
+ logger->stdout(h.to_string(base, base == SRI));
}
}
};
@@ -77,9 +76,7 @@ struct CmdToBase : Command
CmdToBase(Base base) : base(base)
{
- mkFlag()
- .longName("type")
- .mkHashTypeFlag(&ht);
+ addFlag(Flag::mkHashTypeFlag("type", &ht));
expectArgs("strings", &args);
}
@@ -92,10 +89,12 @@ struct CmdToBase : Command
"SRI");
}
+ Category category() override { return catUtility; }
+
void run() override
{
for (auto s : args)
- std::cout << fmt("%s\n", Hash(s, ht).to_string(base, base == SRI));
+ logger->stdout(Hash(s, ht).to_string(base, base == SRI));
}
};
diff --git a/src/nix/installables.cc b/src/nix/installables.cc
index 013218cd9..937d69206 100644
--- a/src/nix/installables.cc
+++ b/src/nix/installables.cc
@@ -12,26 +12,28 @@
namespace nix {
+
SourceExprCommand::SourceExprCommand()
{
- mkFlag()
- .shortName('f')
- .longName("file")
- .label("file")
- .description("evaluate FILE rather than the default")
- .dest(&file);
+ addFlag({
+ .longName = "file",
+ .shortName = 'f',
+ .description = "evaluate FILE rather than the default",
+ .labels = {"file"},
+ .handler = {&file}
+ });
}
Value * SourceExprCommand::getSourceExpr(EvalState & state)
{
- if (vSourceExpr) return vSourceExpr;
+ if (vSourceExpr) return *vSourceExpr;
auto sToplevel = state.symbols.create("_toplevel");
- vSourceExpr = state.allocValue();
+ vSourceExpr = allocRootValue(state.allocValue());
if (file != "")
- state.evalFile(lookupFileArg(state, file), *vSourceExpr);
+ state.evalFile(lookupFileArg(state, file), **vSourceExpr);
else {
@@ -39,9 +41,9 @@ Value * SourceExprCommand::getSourceExpr(EvalState & state)
auto searchPath = state.getSearchPath();
- state.mkAttrs(*vSourceExpr, 1024);
+ state.mkAttrs(**vSourceExpr, 1024);
- mkBool(*state.allocAttr(*vSourceExpr, sToplevel), true);
+ mkBool(*state.allocAttr(**vSourceExpr, sToplevel), true);
std::unordered_set<std::string> seen;
@@ -52,7 +54,7 @@ Value * SourceExprCommand::getSourceExpr(EvalState & state)
mkPrimOpApp(*v1, state.getBuiltin("findFile"), state.getBuiltin("nixPath"));
Value * v2 = state.allocValue();
mkApp(*v2, *v1, mkString(*state.allocValue(), name));
- mkApp(*state.allocAttr(*vSourceExpr, state.symbols.create(name)),
+ mkApp(*state.allocAttr(**vSourceExpr, state.symbols.create(name)),
state.getBuiltin("import"), *v2);
};
@@ -66,10 +68,10 @@ Value * SourceExprCommand::getSourceExpr(EvalState & state)
} else
addEntry(i.first);
- vSourceExpr->attrs->sort();
+ (*vSourceExpr)->attrs->sort();
}
- return vSourceExpr;
+ return *vSourceExpr;
}
ref<EvalState> SourceExprCommand::getEvalState()
@@ -109,6 +111,11 @@ struct InstallableStorePath : Installable
bs.push_back(std::move(b));
return bs;
}
+
+ std::optional<StorePath> getStorePath() override
+ {
+ return storePath.clone();
+ }
};
struct InstallableValue : Installable
diff --git a/src/nix/installables.hh b/src/nix/installables.hh
new file mode 100644
index 000000000..503984220
--- /dev/null
+++ b/src/nix/installables.hh
@@ -0,0 +1,45 @@
+#pragma once
+
+#include "util.hh"
+#include "path.hh"
+#include "eval.hh"
+
+#include <optional>
+
+namespace nix {
+
+struct Buildable
+{
+ std::optional<StorePath> drvPath;
+ std::map<std::string, StorePath> outputs;
+};
+
+typedef std::vector<Buildable> Buildables;
+
+struct Installable
+{
+ virtual ~Installable() { }
+
+ virtual std::string what() = 0;
+
+ virtual Buildables toBuildables()
+ {
+ throw Error("argument '%s' cannot be built", what());
+ }
+
+ Buildable toBuildable();
+
+ virtual std::pair<Value *, Pos> toValue(EvalState & state)
+ {
+ throw Error("argument '%s' cannot be evaluated", what());
+ }
+
+ /* Return a value only if this installable is a store path or a
+ symlink to it. */
+ virtual std::optional<StorePath> getStorePath()
+ {
+ return {};
+ }
+};
+
+}
diff --git a/src/nix/local.mk b/src/nix/local.mk
index 50a18efd7..8c0eed19e 100644
--- a/src/nix/local.mk
+++ b/src/nix/local.mk
@@ -15,9 +15,9 @@ nix_SOURCES := \
$(wildcard src/nix-prefetch-url/*.cc) \
$(wildcard src/nix-store/*.cc) \
-nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain
+nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libexpr -I src/libmain
-nix_LIBS = libexpr libmain libstore libutil libnixrust
+nix_LIBS = libexpr libmain libfetchers libstore libutil libnixrust
nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) -lboost_context -lboost_thread -lboost_system
@@ -27,3 +27,5 @@ $(foreach name, \
$(eval $(call install-symlink, $(bindir)/nix, $(libexecdir)/nix/build-remote))
src/nix-env/user-env.cc: src/nix-env/buildenv.nix.gen.hh
+
+src/nix/dev-shell.cc: src/nix/get-env.sh.gen.hh
diff --git a/src/nix/log.cc b/src/nix/log.cc
index 795991cb7..3fe22f6c2 100644
--- a/src/nix/log.cc
+++ b/src/nix/log.cc
@@ -31,6 +31,8 @@ struct CmdLog : InstallableCommand
};
}
+ Category category() override { return catSecondary; }
+
void run(ref<Store> store) override
{
settings.readOnlyMode = true;
diff --git a/src/nix/ls.cc b/src/nix/ls.cc
index 3ef1f2750..b9716a6a1 100644
--- a/src/nix/ls.cc
+++ b/src/nix/ls.cc
@@ -34,16 +34,14 @@ struct MixLs : virtual Args, MixJSON
(st.isExecutable ? "-r-xr-xr-x" : "-r--r--r--") :
st.type == FSAccessor::Type::tSymlink ? "lrwxrwxrwx" :
"dr-xr-xr-x";
- std::cout <<
- (format("%s %20d %s") % tp % st.fileSize % relPath);
+ auto line = fmt("%s %20d %s", tp, st.fileSize, relPath);
if (st.type == FSAccessor::Type::tSymlink)
- std::cout << " -> " << accessor->readLink(curPath)
- ;
- std::cout << "\n";
+ line += " -> " + accessor->readLink(curPath);
+ logger->stdout(line);
if (recursive && st.type == FSAccessor::Type::tDirectory)
doPath(st, curPath, relPath, false);
} else {
- std::cout << relPath << "\n";
+ logger->stdout(relPath);
if (recursive) {
auto st = accessor->stat(curPath);
if (st.type == FSAccessor::Type::tDirectory)
@@ -102,9 +100,11 @@ struct CmdLsStore : StoreCommand, MixLs
std::string description() override
{
- return "show information about a store path";
+ return "show information about a path in the Nix store";
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store) override
{
list(store->getFSAccessor());
@@ -133,12 +133,14 @@ struct CmdLsNar : Command, MixLs
std::string description() override
{
- return "show information about the contents of a NAR file";
+ return "show information about a path inside a NAR file";
}
+ Category category() override { return catUtility; }
+
void run() override
{
- list(makeNarAccessor(make_ref<std::string>(readFile(narPath, true))));
+ list(makeNarAccessor(make_ref<std::string>(readFile(narPath))));
}
};
diff --git a/src/nix/main.cc b/src/nix/main.cc
index 3b5f5516f..ef301580a 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -8,7 +8,7 @@
#include "shared.hh"
#include "store-api.hh"
#include "progress-bar.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "finally.hh"
#include <sys/types.h>
@@ -59,15 +59,22 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
NixArgs() : MultiCommand(*RegisterCommand::commands), MixCommonArgs("nix")
{
- mkFlag()
- .longName("help")
- .description("show usage information")
- .handler([&]() { showHelpAndExit(); });
-
- mkFlag()
- .longName("help-config")
- .description("show configuration options")
- .handler([&]() {
+ categories.clear();
+ categories[Command::catDefault] = "Main commands";
+ categories[catSecondary] = "Infrequently used commands";
+ categories[catUtility] = "Utility/scripting commands";
+ categories[catNixInstallation] = "Commands for upgrading or troubleshooting your Nix installation";
+
+ addFlag({
+ .longName = "help",
+ .description = "show usage information",
+ .handler = {[&]() { showHelpAndExit(); }},
+ });
+
+ addFlag({
+ .longName = "help-config",
+ .description = "show configuration options",
+ .handler = {[&]() {
std::cout << "The following configuration options are available:\n\n";
Table2 tbl;
std::map<std::string, Config::SettingInfo> settings;
@@ -76,28 +83,33 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
tbl.emplace_back(s.first, s.second.description);
printTable(std::cout, tbl);
throw Exit();
- });
-
- mkFlag()
- .longName("print-build-logs")
- .shortName('L')
- .description("print full build logs on stderr")
- .set(&printBuildLogs, true);
-
- mkFlag()
- .longName("version")
- .description("show version information")
- .handler([&]() { printVersion(programName); });
-
- mkFlag()
- .longName("no-net")
- .description("disable substituters and consider all previously downloaded files up-to-date")
- .handler([&]() { useNet = false; });
-
- mkFlag()
- .longName("refresh")
- .description("consider all previously downloaded files out-of-date")
- .handler([&]() { refresh = true; });
+ }},
+ });
+
+ addFlag({
+ .longName = "print-build-logs",
+ .shortName = 'L',
+ .description = "print full build logs on stderr",
+ .handler = {&printBuildLogs, true},
+ });
+
+ addFlag({
+ .longName = "version",
+ .description = "show version information",
+ .handler = {[&]() { printVersion(programName); }},
+ });
+
+ addFlag({
+ .longName = "no-net",
+ .description = "disable substituters and consider all previously downloaded files up-to-date",
+ .handler = {[&]() { useNet = false; }},
+ });
+
+ addFlag({
+ .longName = "refresh",
+ .description = "consider all previously downloaded files out-of-date",
+ .handler = {[&]() { refresh = true; }},
+ });
}
void printFlags(std::ostream & out) override
@@ -105,8 +117,8 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
Args::printFlags(out);
std::cout <<
"\n"
- "In addition, most configuration settings can be overriden using '--<name> <value>'.\n"
- "Boolean settings can be overriden using '--<name>' or '--no-<name>'. See 'nix\n"
+ "In addition, most configuration settings can be overriden using '--" ANSI_ITALIC "name value" ANSI_NORMAL "'.\n"
+ "Boolean settings can be overriden using '--" ANSI_ITALIC "name" ANSI_NORMAL "' or '--no-" ANSI_ITALIC "name" ANSI_NORMAL "'. See 'nix\n"
"--help-config' for a list of configuration settings.\n";
}
@@ -115,10 +127,10 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
MultiCommand::printHelp(programName, out);
#if 0
- out << "\nFor full documentation, run 'man " << programName << "' or 'man " << programName << "-<COMMAND>'.\n";
+ out << "\nFor full documentation, run 'man " << programName << "' or 'man " << programName << "-" ANSI_ITALIC "COMMAND" ANSI_NORMAL "'.\n";
#endif
- std::cout << "\nNote: this program is EXPERIMENTAL and subject to change.\n";
+ std::cout << "\nNote: this program is " ANSI_RED "EXPERIMENTAL" ANSI_NORMAL " and subject to change.\n";
}
void showHelpAndExit()
@@ -155,12 +167,15 @@ void mainWrapped(int argc, char * * argv)
args.parseCmdline(argvToStrings(argc, argv));
- settings.requireExperimentalFeature("nix-command");
-
initPlugins();
if (!args.command) args.showHelpAndExit();
+ if (args.command->first != "repl"
+ && args.command->first != "doctor"
+ && args.command->first != "upgrade-nix")
+ settings.requireExperimentalFeature("nix-command");
+
Finally f([]() { stopProgressBar(); });
startProgressBar(args.printBuildLogs);
@@ -176,17 +191,17 @@ void mainWrapped(int argc, char * * argv)
settings.useSubstitutes = false;
if (!settings.tarballTtl.overriden)
settings.tarballTtl = std::numeric_limits<unsigned int>::max();
- if (!downloadSettings.tries.overriden)
- downloadSettings.tries = 0;
- if (!downloadSettings.connectTimeout.overriden)
- downloadSettings.connectTimeout = 1;
+ if (!fileTransferSettings.tries.overriden)
+ fileTransferSettings.tries = 0;
+ if (!fileTransferSettings.connectTimeout.overriden)
+ fileTransferSettings.connectTimeout = 1;
}
if (args.refresh)
settings.tarballTtl = 0;
- args.command->prepare();
- args.command->run();
+ args.command->second->prepare();
+ args.command->second->run();
}
}
diff --git a/src/nix/make-content-addressable.cc b/src/nix/make-content-addressable.cc
index 93eddbb1f..3e7ff544d 100644
--- a/src/nix/make-content-addressable.cc
+++ b/src/nix/make-content-addressable.cc
@@ -31,6 +31,9 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
},
};
}
+
+ Category category() override { return catUtility; }
+
void run(ref<Store> store, StorePaths storePaths) override
{
auto paths = store->topoSortPaths(storePathsToSet(storePaths));
diff --git a/src/nix/optimise-store.cc b/src/nix/optimise-store.cc
index fed012b04..b45951879 100644
--- a/src/nix/optimise-store.cc
+++ b/src/nix/optimise-store.cc
@@ -23,6 +23,8 @@ struct CmdOptimiseStore : StoreCommand
};
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store) override
{
store->optimiseStore();
diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc
index 45ec297d2..88d7fffd4 100644
--- a/src/nix/path-info.cc
+++ b/src/nix/path-info.cc
@@ -29,6 +29,8 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
return "query information about store paths";
}
+ Category category() override { return catSecondary; }
+
Examples examples() override
{
return {
diff --git a/src/nix/ping-store.cc b/src/nix/ping-store.cc
index 3a2e542a3..127397a29 100644
--- a/src/nix/ping-store.cc
+++ b/src/nix/ping-store.cc
@@ -21,6 +21,8 @@ struct CmdPingStore : StoreCommand
};
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store) override
{
store->connect();
diff --git a/src/nix/progress-bar.cc b/src/nix/progress-bar.cc
index 26631416c..8e7ba95a3 100644
--- a/src/nix/progress-bar.cc
+++ b/src/nix/progress-bar.cc
@@ -7,6 +7,7 @@
#include <atomic>
#include <map>
#include <thread>
+#include <iostream>
namespace nix {
@@ -190,8 +191,8 @@ public:
i->s = fmt("querying " ANSI_BOLD "%s" ANSI_NORMAL " on %s", name, getS(fields, 1));
}
- if ((type == actDownload && hasAncestor(*state, actCopyPath, parent))
- || (type == actDownload && hasAncestor(*state, actQueryPathInfo, parent))
+ if ((type == actFileTransfer && hasAncestor(*state, actCopyPath, parent))
+ || (type == actFileTransfer && hasAncestor(*state, actQueryPathInfo, parent))
|| (type == actCopyPath && hasAncestor(*state, actSubstitute, parent)))
i->visible = false;
@@ -416,7 +417,7 @@ public:
if (!s2.empty()) { res += " ("; res += s2; res += ')'; }
}
- showActivity(actDownload, "%s MiB DL", "%.1f", MiB);
+ showActivity(actFileTransfer, "%s MiB DL", "%.1f", MiB);
{
auto s = renderActivity(actOptimiseStore, "%s paths optimised");
@@ -442,6 +443,18 @@ public:
return res;
}
+
+ void writeToStdout(std::string_view s) override
+ {
+ auto state(state_.lock());
+ if (state->active) {
+ std::cerr << "\r\e[K";
+ Logger::writeToStdout(s);
+ draw(*state);
+ } else {
+ Logger::writeToStdout(s);
+ }
+ }
};
void startProgressBar(bool printBuildLogs)
diff --git a/src/nix/repl.cc b/src/nix/repl.cc
index 27727bd25..ea8ff1553 100644
--- a/src/nix/repl.cc
+++ b/src/nix/repl.cc
@@ -82,40 +82,6 @@ struct NixRepl : gc
};
-void printHelp()
-{
- std::cout
- << "Usage: nix-repl [--help] [--version] [-I path] paths...\n"
- << "\n"
- << "nix-repl is a simple read-eval-print loop (REPL) for the Nix package manager.\n"
- << "\n"
- << "Options:\n"
- << " --help\n"
- << " Prints out a summary of the command syntax and exits.\n"
- << "\n"
- << " --version\n"
- << " Prints out the Nix version number on standard output and exits.\n"
- << "\n"
- << " -I path\n"
- << " Add a path to the Nix expression search path. This option may be given\n"
- << " multiple times. See the NIX_PATH environment variable for information on\n"
- << " the semantics of the Nix search path. Paths added through -I take\n"
- << " precedence over NIX_PATH.\n"
- << "\n"
- << " paths...\n"
- << " A list of paths to files containing Nix expressions which nix-repl will\n"
- << " load and add to its scope.\n"
- << "\n"
- << " A path surrounded in < and > will be looked up in the Nix expression search\n"
- << " path, as in the Nix language itself.\n"
- << "\n"
- << " If an element of paths starts with http:// or https://, it is interpreted\n"
- << " as the URL of a tarball that will be downloaded and unpacked to a temporary\n"
- << " location. The tarball must include a single top-level directory containing\n"
- << " at least a file named default.nix.\n";
-}
-
-
string removeWhitespace(string s)
{
s = chomp(s);
@@ -809,6 +775,16 @@ struct CmdRepl : StoreCommand, MixEvalArgs
return "start an interactive environment for evaluating Nix expressions";
}
+ Examples examples() override
+ {
+ return {
+ Example{
+ "Display all special commands within the REPL:",
+ "nix repl\n nix-repl> :?"
+ }
+ };
+ }
+
void run(ref<Store> store) override
{
auto repl = std::make_unique<NixRepl>(searchPath, openStore());
diff --git a/src/nix/run.cc b/src/nix/run.cc
index f885c5e49..b888281a5 100644
--- a/src/nix/run.cc
+++ b/src/nix/run.cc
@@ -8,6 +8,7 @@
#include "fs-accessor.hh"
#include "progress-bar.hh"
#include "affinity.hh"
+#include "eval.hh"
#if __linux__
#include <sys/mount.h>
@@ -19,46 +20,59 @@ using namespace nix;
std::string chrootHelperName = "__run_in_chroot";
-struct CmdRun : InstallablesCommand
+struct RunCommon : virtual Command
{
- std::vector<std::string> command = { "bash" };
- StringSet keep, unset;
- bool ignoreEnvironment = false;
+ void runProgram(ref<Store> store,
+ const std::string & program,
+ const Strings & args)
+ {
+ stopProgressBar();
- CmdRun()
+ restoreSignals();
+
+ restoreAffinity();
+
+ /* If this is a diverted store (i.e. its "logical" location
+ (typically /nix/store) differs from its "physical" location
+ (e.g. /home/eelco/nix/store), then run the command in a
+ chroot. For non-root users, this requires running it in new
+ mount and user namespaces. Unfortunately,
+ unshare(CLONE_NEWUSER) doesn't work in a multithreaded
+ program (which "nix" is), so we exec() a single-threaded
+ helper program (chrootHelper() below) to do the work. */
+ auto store2 = store.dynamic_pointer_cast<LocalStore>();
+
+ if (store2 && store->storeDir != store2->realStoreDir) {
+ Strings helperArgs = { chrootHelperName, store->storeDir, store2->realStoreDir, program };
+ for (auto & arg : args) helperArgs.push_back(arg);
+
+ execv(readLink("/proc/self/exe").c_str(), stringsToCharPtrs(helperArgs).data());
+
+ throw SysError("could not execute chroot helper");
+ }
+
+ execvp(program.c_str(), stringsToCharPtrs(args).data());
+
+ throw SysError("unable to execute '%s'", program);
+ }
+};
+
+struct CmdShell : InstallablesCommand, RunCommon, MixEnvironment
+{
+ std::vector<std::string> command = { getEnv("SHELL").value_or("bash") };
+
+ CmdShell()
{
- mkFlag()
- .longName("command")
- .shortName('c')
- .description("command and arguments to be executed; defaults to 'bash'")
- .labels({"command", "args"})
- .arity(ArityAny)
- .handler([&](std::vector<std::string> ss) {
+ addFlag({
+ .longName = "command",
+ .shortName = 'c',
+ .description = "command and arguments to be executed; defaults to '$SHELL'",
+ .labels = {"command", "args"},
+ .handler = {[&](std::vector<std::string> ss) {
if (ss.empty()) throw UsageError("--command requires at least one argument");
command = ss;
- });
-
- mkFlag()
- .longName("ignore-environment")
- .shortName('i')
- .description("clear the entire environment (except those specified with --keep)")
- .set(&ignoreEnvironment, true);
-
- mkFlag()
- .longName("keep")
- .shortName('k')
- .description("keep specified environment variable")
- .arity(1)
- .labels({"name"})
- .handler([&](std::vector<std::string> ss) { keep.insert(ss.front()); });
-
- mkFlag()
- .longName("unset")
- .shortName('u')
- .description("unset specified environment variable")
- .arity(1)
- .labels({"name"})
- .handler([&](std::vector<std::string> ss) { unset.insert(ss.front()); });
+ }}
+ });
}
std::string description() override
@@ -71,19 +85,19 @@ struct CmdRun : InstallablesCommand
return {
Example{
"To start a shell providing GNU Hello from NixOS 17.03:",
- "nix run -f channel:nixos-17.03 hello"
+ "nix shell -f channel:nixos-17.03 hello"
},
Example{
"To start a shell providing youtube-dl from your 'nixpkgs' channel:",
- "nix run nixpkgs.youtube-dl"
+ "nix shell nixpkgs.youtube-dl"
},
Example{
"To run GNU Hello:",
- "nix run nixpkgs.hello -c hello --greeting 'Hi everybody!'"
+ "nix shell nixpkgs.hello -c hello --greeting 'Hi everybody!'"
},
Example{
"To run GNU Hello in a chroot store:",
- "nix run --store ~/my-nix nixpkgs.hello -c hello"
+ "nix shell --store ~/my-nix nixpkgs.hello -c hello"
},
};
}
@@ -94,35 +108,13 @@ struct CmdRun : InstallablesCommand
auto accessor = store->getFSAccessor();
- if (ignoreEnvironment) {
-
- if (!unset.empty())
- throw UsageError("--unset does not make sense with --ignore-environment");
-
- std::map<std::string, std::string> kept;
- for (auto & var : keep) {
- auto s = getenv(var.c_str());
- if (s) kept[var] = s;
- }
-
- clearEnv();
-
- for (auto & var : kept)
- setenv(var.first.c_str(), var.second.c_str(), 1);
-
- } else {
-
- if (!keep.empty())
- throw UsageError("--keep does not make sense without --ignore-environment");
-
- for (auto & var : unset)
- unsetenv(var.c_str());
- }
std::unordered_set<StorePath> done;
std::queue<StorePath> todo;
for (auto & path : outPaths) todo.push(path.clone());
+ setEnviron();
+
auto unixPath = tokenizeString<Strings>(getEnv("PATH").value_or(""), ":");
while (!todo.empty()) {
@@ -142,42 +134,14 @@ struct CmdRun : InstallablesCommand
setenv("PATH", concatStringsSep(":", unixPath).c_str(), 1);
- std::string cmd = *command.begin();
Strings args;
for (auto & arg : command) args.push_back(arg);
- stopProgressBar();
-
- restoreSignals();
-
- restoreAffinity();
-
- /* If this is a diverted store (i.e. its "logical" location
- (typically /nix/store) differs from its "physical" location
- (e.g. /home/eelco/nix/store), then run the command in a
- chroot. For non-root users, this requires running it in new
- mount and user namespaces. Unfortunately,
- unshare(CLONE_NEWUSER) doesn't work in a multithreaded
- program (which "nix" is), so we exec() a single-threaded
- helper program (chrootHelper() below) to do the work. */
- auto store2 = store.dynamic_pointer_cast<LocalStore>();
-
- if (store2 && store->storeDir != store2->realStoreDir) {
- Strings helperArgs = { chrootHelperName, store->storeDir, store2->realStoreDir, cmd };
- for (auto & arg : args) helperArgs.push_back(arg);
-
- execv(readLink("/proc/self/exe").c_str(), stringsToCharPtrs(helperArgs).data());
-
- throw SysError("could not execute chroot helper");
- }
-
- execvp(cmd.c_str(), stringsToCharPtrs(args).data());
-
- throw SysError("unable to exec '%s'", cmd);
+ runProgram(store, *command.begin(), args);
}
};
-static auto r1 = registerCommand<CmdRun>("run");
+static auto r1 = registerCommand<CmdShell>("shell");
void chrootHelper(int argc, char * * argv)
{
diff --git a/src/nix/search.cc b/src/nix/search.cc
index 769274543..ba72c1e79 100644
--- a/src/nix/search.cc
+++ b/src/nix/search.cc
@@ -40,16 +40,18 @@ struct CmdSearch : SourceExprCommand, MixJSON
{
expectArgs("regex", &res);
- mkFlag()
- .longName("update-cache")
- .shortName('u')
- .description("update the package search cache")
- .handler([&]() { writeCache = true; useCache = false; });
-
- mkFlag()
- .longName("no-cache")
- .description("do not use or update the package search cache")
- .handler([&]() { writeCache = false; useCache = false; });
+ addFlag({
+ .longName = "update-cache",
+ .shortName = 'u',
+ .description = "update the package search cache",
+ .handler = {[&]() { writeCache = true; useCache = false; }}
+ });
+
+ addFlag({
+ .longName = "no-cache",
+ .description = "do not use or update the package search cache",
+ .handler = {[&]() { writeCache = false; useCache = false; }}
+ });
}
std::string description() override
@@ -263,7 +265,7 @@ struct CmdSearch : SourceExprCommand, MixJSON
throw SysError("cannot rename '%s' to '%s'", tmpFile, jsonCacheFileName);
}
- if (results.size() == 0)
+ if (!json && results.size() == 0)
throw Error("no results for the given search term(s)!");
RunPager pager;
diff --git a/src/nix/show-config.cc b/src/nix/show-config.cc
index 87544f937..4fd8886de 100644
--- a/src/nix/show-config.cc
+++ b/src/nix/show-config.cc
@@ -13,6 +13,8 @@ struct CmdShowConfig : Command, MixJSON
return "show the Nix configuration";
}
+ Category category() override { return catUtility; }
+
void run() override
{
if (json) {
@@ -23,7 +25,7 @@ struct CmdShowConfig : Command, MixJSON
std::map<std::string, Config::SettingInfo> settings;
globalConfig.getSettings(settings);
for (auto & s : settings)
- std::cout << s.first + " = " + s.second.value + "\n";
+ logger->stdout("%s = %s", s.first, s.second.value);
}
}
};
diff --git a/src/nix/show-derivation.cc b/src/nix/show-derivation.cc
index 0ede7b468..22c569f3c 100644
--- a/src/nix/show-derivation.cc
+++ b/src/nix/show-derivation.cc
@@ -15,11 +15,12 @@ struct CmdShowDerivation : InstallablesCommand
CmdShowDerivation()
{
- mkFlag()
- .longName("recursive")
- .shortName('r')
- .description("include the dependencies of the specified derivations")
- .set(&recursive, true);
+ addFlag({
+ .longName = "recursive",
+ .shortName = 'r',
+ .description = "include the dependencies of the specified derivations",
+ .handler = {&recursive, true}
+ });
}
std::string description() override
@@ -41,6 +42,8 @@ struct CmdShowDerivation : InstallablesCommand
};
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store) override
{
auto drvPaths = toDerivations(store, installables, true);
diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc
index 5f07448e0..6c9b9a792 100644
--- a/src/nix/sigs.cc
+++ b/src/nix/sigs.cc
@@ -13,13 +13,13 @@ struct CmdCopySigs : StorePathsCommand
CmdCopySigs()
{
- mkFlag()
- .longName("substituter")
- .shortName('s')
- .labels({"store-uri"})
- .description("use signatures from specified store")
- .arity(1)
- .handler([&](std::vector<std::string> ss) { substituterUris.push_back(ss[0]); });
+ addFlag({
+ .longName = "substituter",
+ .shortName = 's',
+ .description = "use signatures from specified store",
+ .labels = {"store-uri"},
+ .handler = {[&](std::string s) { substituterUris.push_back(s); }},
+ });
}
std::string description() override
@@ -27,6 +27,8 @@ struct CmdCopySigs : StorePathsCommand
return "copy path signatures from substituters (like binary caches)";
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store, StorePaths storePaths) override
{
if (substituterUris.empty())
@@ -98,12 +100,13 @@ struct CmdSignPaths : StorePathsCommand
CmdSignPaths()
{
- mkFlag()
- .shortName('k')
- .longName("key-file")
- .label("file")
- .description("file containing the secret signing key")
- .dest(&secretKeyFile);
+ addFlag({
+ .longName = "key-file",
+ .shortName = 'k',
+ .description = "file containing the secret signing key",
+ .labels = {"file"},
+ .handler = {&secretKeyFile}
+ });
}
std::string description() override
@@ -111,6 +114,8 @@ struct CmdSignPaths : StorePathsCommand
return "sign the specified paths";
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store, StorePaths storePaths) override
{
if (secretKeyFile.empty())
diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc
index c05c29517..678780f33 100644
--- a/src/nix/upgrade-nix.cc
+++ b/src/nix/upgrade-nix.cc
@@ -1,7 +1,7 @@
#include "command.hh"
#include "common-args.hh"
#include "store-api.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "eval.hh"
#include "attr-path.hh"
#include "names.hh"
@@ -16,18 +16,20 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
CmdUpgradeNix()
{
- mkFlag()
- .longName("profile")
- .shortName('p')
- .labels({"profile-dir"})
- .description("the Nix profile to upgrade")
- .dest(&profileDir);
-
- mkFlag()
- .longName("nix-store-paths-url")
- .labels({"url"})
- .description("URL of the file that contains the store paths of the latest Nix release")
- .dest(&storePathsUrl);
+ addFlag({
+ .longName = "profile",
+ .shortName = 'p',
+ .description = "the Nix profile to upgrade",
+ .labels = {"profile-dir"},
+ .handler = {&profileDir}
+ });
+
+ addFlag({
+ .longName = "nix-store-paths-url",
+ .description = "URL of the file that contains the store paths of the latest Nix release",
+ .labels = {"url"},
+ .handler = {&storePathsUrl}
+ });
}
std::string description() override
@@ -49,6 +51,8 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
};
}
+ Category category() override { return catNixInstallation; }
+
void run(ref<Store> store) override
{
evalSettings.pureEval = true;
@@ -138,8 +142,8 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
Activity act(*logger, lvlInfo, actUnknown, "querying latest Nix version");
// FIXME: use nixos.org?
- auto req = DownloadRequest(storePathsUrl);
- auto res = getDownloader()->download(req);
+ auto req = FileTransferRequest(storePathsUrl);
+ auto res = getFileTransfer()->download(req);
auto state = std::make_unique<EvalState>(Strings(), store);
auto v = state->allocValue();
diff --git a/src/nix/verify.cc b/src/nix/verify.cc
index 9b0658803..cf1fa6a99 100644
--- a/src/nix/verify.cc
+++ b/src/nix/verify.cc
@@ -20,13 +20,13 @@ struct CmdVerify : StorePathsCommand
{
mkFlag(0, "no-contents", "do not verify the contents of each store path", &noContents);
mkFlag(0, "no-trust", "do not verify whether each store path is trusted", &noTrust);
- mkFlag()
- .longName("substituter")
- .shortName('s')
- .labels({"store-uri"})
- .description("use signatures from specified store")
- .arity(1)
- .handler([&](std::vector<std::string> ss) { substituterUris.push_back(ss[0]); });
+ addFlag({
+ .longName = "substituter",
+ .shortName = 's',
+ .description = "use signatures from specified store",
+ .labels = {"store-uri"},
+ .handler = {[&](std::string s) { substituterUris.push_back(s); }}
+ });
mkIntFlag('n', "sigs-needed", "require that each path has at least N valid signatures", &sigsNeeded);
}
@@ -49,6 +49,8 @@ struct CmdVerify : StorePathsCommand
};
}
+ Category category() override { return catSecondary; }
+
void run(ref<Store> store, StorePaths storePaths) override
{
std::vector<ref<Store>> substituters;
diff --git a/src/nix/why-depends.cc b/src/nix/why-depends.cc
index d3b7a674a..6057beedb 100644
--- a/src/nix/why-depends.cc
+++ b/src/nix/why-depends.cc
@@ -37,11 +37,12 @@ struct CmdWhyDepends : SourceExprCommand
expectArg("package", &_package);
expectArg("dependency", &_dependency);
- mkFlag()
- .longName("all")
- .shortName('a')
- .description("show all edges in the dependency graph leading from 'package' to 'dependency', rather than just a shortest path")
- .set(&all, true);
+ addFlag({
+ .longName = "all",
+ .shortName = 'a',
+ .description = "show all edges in the dependency graph leading from 'package' to 'dependency', rather than just a shortest path",
+ .handler = {&all, true},
+ });
}
std::string description() override
@@ -67,6 +68,8 @@ struct CmdWhyDepends : SourceExprCommand
};
}
+ Category category() override { return catSecondary; }
+
void run(ref<Store> store) override
{
auto package = parseInstallable(*this, store, _package, false);
@@ -149,7 +152,7 @@ struct CmdWhyDepends : SourceExprCommand
auto pathS = store->printStorePath(node.path);
assert(node.dist != inf);
- std::cout << fmt("%s%s%s%s" ANSI_NORMAL "\n",
+ logger->stdout("%s%s%s%s" ANSI_NORMAL,
firstPad,
node.visited ? "\e[38;5;244m" : "",
firstPad != "" ? "→ " : "",