-rw-r--r--  .github/ISSUE_TEMPLATE.md | 27
-rw-r--r--  .github/ISSUE_TEMPLATE/bug_report.md | 32
-rw-r--r--  .github/ISSUE_TEMPLATE/feature_request.md | 20
-rw-r--r--  .travis.yml | 8
-rw-r--r--  Makefile | 5
-rw-r--r--  Makefile.config.in | 19
-rw-r--r--  README.md | 57
-rw-r--r--  configure.ac | 4
-rw-r--r--  doc/manual/advanced-topics/post-build-hook.xml | 4
-rw-r--r--  doc/manual/command-ref/conf-file.xml | 36
-rw-r--r--  doc/manual/command-ref/env-common.xml | 13
-rw-r--r--  doc/manual/command-ref/nix-store.xml | 2
-rw-r--r--  doc/manual/expressions/builtins.xml | 10
-rw-r--r--  doc/manual/installation/upgrading.xml | 7
-rw-r--r--  doc/manual/release-notes/rl-0.8.xml | 2
-rwxr-xr-x  maintainers/upload-release.pl | 86
-rw-r--r--  mk/programs.mk | 26
-rw-r--r--  mk/tracing.mk | 1
-rw-r--r--  nix-rust/local.mk | 2
-rw-r--r--  precompiled-headers.h | 3
-rw-r--r--  release-common.nix | 1
-rw-r--r--  release.nix | 5
-rw-r--r--  src/libexpr/common-eval-args.cc | 45
-rw-r--r--  src/libexpr/eval-inline.hh | 4
-rw-r--r--  src/libexpr/eval.cc | 18
-rw-r--r--  src/libexpr/json-to-value.cc | 63
-rw-r--r--  src/libexpr/local.mk | 4
-rw-r--r--  src/libexpr/nixexpr.hh | 6
-rw-r--r--  src/libexpr/parser.y | 23
-rw-r--r--  src/libexpr/primops.cc | 164
-rw-r--r--  src/libexpr/primops.hh | 1
-rw-r--r--  src/libexpr/primops/fetchGit.cc | 227
-rw-r--r--  src/libexpr/primops/fetchMercurial.cc | 206
-rw-r--r--  src/libexpr/primops/fetchTree.cc | 165
-rw-r--r--  src/libexpr/value.hh | 9
-rw-r--r--  src/libfetchers/attrs.cc | 107
-rw-r--r--  src/libfetchers/attrs.hh | 39
-rw-r--r--  src/libfetchers/cache.cc | 121
-rw-r--r--  src/libfetchers/cache.hh | 34
-rw-r--r--  src/libfetchers/fetchers.cc | 75
-rw-r--r--  src/libfetchers/fetchers.hh | 103
-rw-r--r--  src/libfetchers/git.cc | 438
-rw-r--r--  src/libfetchers/github.cc | 195
-rw-r--r--  src/libfetchers/local.mk | 11
-rw-r--r--  src/libfetchers/mercurial.cc | 303
-rw-r--r--  src/libfetchers/path.cc | 148
-rw-r--r--  src/libfetchers/tarball.cc | 275
-rw-r--r--  src/libfetchers/tree-info.cc | 14
-rw-r--r--  src/libfetchers/tree-info.hh | 29
-rw-r--r--  src/libmain/common-args.cc | 68
-rw-r--r--  src/libmain/shared.cc | 66
-rw-r--r--  src/libstore/build.cc | 74
-rw-r--r--  src/libstore/builtins/fetchurl.cc | 10
-rw-r--r--  src/libstore/derivations.cc | 2
-rw-r--r--  src/libstore/filetransfer.cc (renamed from src/libstore/download.cc) | 281
-rw-r--r--  src/libstore/filetransfer.hh (renamed from src/libstore/download.hh) | 82
-rw-r--r--  src/libstore/gc.cc | 11
-rw-r--r--  src/libstore/globals.cc | 56
-rw-r--r--  src/libstore/globals.hh | 17
-rw-r--r--  src/libstore/http-binary-cache-store.cc | 35
-rw-r--r--  src/libstore/s3-binary-cache-store.cc | 6
-rw-r--r--  src/libstore/s3.hh | 4
-rw-r--r--  src/libstore/store-api.cc | 25
-rw-r--r--  src/libstore/store-api.hh | 1
-rw-r--r--  src/libutil/ansicolor.hh | 24
-rw-r--r--  src/libutil/args.cc | 101
-rw-r--r--  src/libutil/args.hh | 157
-rw-r--r--  src/libutil/config.cc | 35
-rw-r--r--  src/libutil/logging.cc | 9
-rw-r--r--  src/libutil/logging.hh | 12
-rw-r--r--  src/libutil/serialise.hh | 3
-rw-r--r--  src/libutil/tests/local.mk | 15
-rw-r--r--  src/libutil/tests/tests.cc | 585
-rw-r--r--  src/libutil/types.hh | 8
-rw-r--r--  src/libutil/url.cc | 137
-rw-r--r--  src/libutil/url.hh | 62
-rw-r--r--  src/libutil/util.cc | 71
-rw-r--r--  src/libutil/util.hh | 22
-rwxr-xr-x  src/nix-channel/nix-channel.cc | 21
-rw-r--r--  src/nix-env/nix-env.cc | 47
-rw-r--r--  src/nix-prefetch-url/nix-prefetch-url.cc | 6
-rw-r--r--  src/nix/add-to-store.cc | 17
-rw-r--r--  src/nix/build.cc | 22
-rw-r--r--  src/nix/cat.cc | 8
-rw-r--r--  src/nix/command.cc | 77
-rw-r--r--  src/nix/command.hh | 6
-rw-r--r--  src/nix/copy.cc | 49
-rw-r--r--  src/nix/dev-shell.cc (renamed from src/nix/shell.cc) | 95
-rw-r--r--  src/nix/doctor.cc | 4
-rw-r--r--  src/nix/dump-path.cc | 2
-rw-r--r--  src/nix/edit.cc | 2
-rw-r--r--  src/nix/eval.cc | 7
-rw-r--r--  src/nix/get-env.sh | 9
-rw-r--r--  src/nix/hash.cc | 17
-rw-r--r--  src/nix/installables.cc | 30
-rw-r--r--  src/nix/local.mk | 6
-rw-r--r--  src/nix/log.cc | 2
-rw-r--r--  src/nix/ls.cc | 20
-rw-r--r--  src/nix/main.cc | 96
-rw-r--r--  src/nix/make-content-addressable.cc | 3
-rw-r--r--  src/nix/optimise-store.cc | 2
-rw-r--r--  src/nix/path-info.cc | 2
-rw-r--r--  src/nix/ping-store.cc | 2
-rw-r--r--  src/nix/progress-bar.cc | 19
-rw-r--r--  src/nix/repl.cc | 44
-rw-r--r--  src/nix/run.cc | 32
-rw-r--r--  src/nix/search.cc | 24
-rw-r--r--  src/nix/show-config.cc | 4
-rw-r--r--  src/nix/show-derivation.cc | 13
-rw-r--r--  src/nix/sigs.cc | 31
-rw-r--r--  src/nix/upgrade-nix.cc | 34
-rw-r--r--  src/nix/verify.cc | 16
-rw-r--r--  src/nix/why-depends.cc | 15
-rw-r--r--  tests/check.nix | 33
-rw-r--r--  tests/check.sh | 47
-rw-r--r--  tests/common.sh.in | 3
-rw-r--r--  tests/config.sh | 18
-rw-r--r--  tests/config/nix-with-substituters.conf | 2
-rw-r--r--  tests/fetchGit.sh | 32
-rw-r--r--  tests/fetchGitSubmodules.sh | 97
-rw-r--r--  tests/init.sh | 2
-rw-r--r--  tests/lang/eval-okay-getattrpos-functionargs.exp | 1
-rw-r--r--  tests/lang/eval-okay-getattrpos-functionargs.nix | 4
-rw-r--r--  tests/linux-sandbox.sh | 7
-rw-r--r--  tests/local.mk | 4
-rw-r--r--  tests/run.sh | 28
-rw-r--r--  tests/shell-hello.nix (renamed from tests/run.nix) | 0
-rw-r--r--  tests/shell.sh | 28
-rw-r--r--  tests/tarball.sh | 7
129 files changed, 4607 insertions, 1666 deletions
diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
deleted file mode 100644
index 3372b1f03..000000000
--- a/.github/ISSUE_TEMPLATE.md
+++ /dev/null
@@ -1,27 +0,0 @@
-<!--
-
-# Filing a Nix issue
-
-*WAIT* Are you sure you're filing your issue in the right repository?
-
-We appreciate you taking the time to tell us about issues you encounter, but routing the issue to the right place will get you help sooner and save everyone time.
-
-This is the Nix repository, and issues here should be about Nix the build and package management *_tool_*.
-
-If you have a problem with a specific package on NixOS or when using Nix, you probably want to file an issue with _nixpkgs_, whose issue tracker is over at https://github.com/NixOS/nixpkgs/issues.
-
-Examples of _Nix_ issues:
-
-- Nix segfaults when I run `nix-build -A blahblah`
-- The Nix language needs a new builtin: `builtins.foobar`
-- Regression in the behavior of `nix-env` in Nix 2.0
-
-Examples of _nixpkgs_ issues:
-
-- glibc is b0rked on aarch64
-- chromium in NixOS doesn't support U2F but google-chrome does!
-- The OpenJDK package on macOS is missing a key component
-
-Chances are if you're a newcomer to the Nix world, you'll probably want the [nixpkgs tracker](https://github.com/NixOS/nixpkgs/issues). It also gets a lot more eyeball traffic so you'll probably get a response a lot more quickly.
-
--->
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 000000000..e6d346bc1
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,32 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: ''
+labels: bug
+assignees: ''
+
+---
+
+**Describe the bug**
+
+A clear and concise description of what the bug is.
+
+If you have a problem with a specific package or NixOS,
+you probably want to file an issue at https://github.com/NixOS/nixpkgs/issues.
+
+**Steps To Reproduce**
+
+1. Go to '...'
+2. Click on '....'
+3. Scroll down to '....'
+4. See error
+
+**Expected behavior**
+
+A clear and concise description of what you expected to happen.
+
+**`nix-env --version` output**
+
+**Additional context**
+
+Add any other context about the problem here.
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 000000000..392ed30c6
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,20 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: ''
+labels: improvement
+assignees: ''
+
+---
+
+**Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+
+**Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index ee4ea1ac6..000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-matrix:
- include:
- - language: osx
- script: ./tests/install-darwin.sh
- - language: nix
- script: nix-build release.nix -A build.x86_64-linux
-notifications:
- email: false
diff --git a/Makefile b/Makefile
index 4fac27c47..0ba011e2a 100644
--- a/Makefile
+++ b/Makefile
@@ -3,7 +3,9 @@ makefiles = \
local.mk \
nix-rust/local.mk \
src/libutil/local.mk \
+ src/libutil/tests/local.mk \
src/libstore/local.mk \
+ src/libfetchers/local.mk \
src/libmain/local.mk \
src/libexpr/local.mk \
src/nix/local.mk \
@@ -15,8 +17,7 @@ makefiles = \
misc/upstart/local.mk \
doc/manual/local.mk \
tests/local.mk \
- tests/plugins/local.mk \
- src/error-demo/local.mk
+ tests/plugins/local.mk
-include Makefile.config
diff --git a/Makefile.config.in b/Makefile.config.in
index e7a12089a..b632444e8 100644
--- a/Makefile.config.in
+++ b/Makefile.config.in
@@ -1,36 +1,38 @@
AR = @AR@
BDW_GC_LIBS = @BDW_GC_LIBS@
+BOOST_LDFLAGS = @BOOST_LDFLAGS@
BUILD_SHARED_LIBS = @BUILD_SHARED_LIBS@
CC = @CC@
CFLAGS = @CFLAGS@
CXX = @CXX@
CXXFLAGS = @CXXFLAGS@
-LDFLAGS = @LDFLAGS@
+EDITLINE_LIBS = @EDITLINE_LIBS@
ENABLE_S3 = @ENABLE_S3@
-HAVE_SODIUM = @HAVE_SODIUM@
+GTEST_LIBS = @GTEST_LIBS@
HAVE_SECCOMP = @HAVE_SECCOMP@
-BOOST_LDFLAGS = @BOOST_LDFLAGS@
+HAVE_SODIUM = @HAVE_SODIUM@
+LDFLAGS = @LDFLAGS@
+LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@
+LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
LIBCURL_LIBS = @LIBCURL_LIBS@
+LIBLZMA_LIBS = @LIBLZMA_LIBS@
OPENSSL_LIBS = @OPENSSL_LIBS@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_VERSION = @PACKAGE_VERSION@
SODIUM_LIBS = @SODIUM_LIBS@
-LIBLZMA_LIBS = @LIBLZMA_LIBS@
SQLITE3_LIBS = @SQLITE3_LIBS@
-LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
-LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@
-EDITLINE_LIBS = @EDITLINE_LIBS@
bash = @bash@
bindir = @bindir@
-lsof = @lsof@
datadir = @datadir@
datarootdir = @datarootdir@
+doc_generate = @doc_generate@
docdir = @docdir@
exec_prefix = @exec_prefix@
includedir = @includedir@
libdir = @libdir@
libexecdir = @libexecdir@
localstatedir = @localstatedir@
+lsof = @lsof@
mandir = @mandir@
pkglibdir = $(libdir)/$(PACKAGE_NAME)
prefix = @prefix@
@@ -38,6 +40,5 @@ sandbox_shell = @sandbox_shell@
storedir = @storedir@
sysconfdir = @sysconfdir@
system = @system@
-doc_generate = @doc_generate@
xmllint = @xmllint@
xsltproc = @xsltproc@
diff --git a/README.md b/README.md
index 61054f8f2..a1588284d 100644
--- a/README.md
+++ b/README.md
@@ -1,21 +1,54 @@
+# Nix
+
[![Open Collective supporters](https://opencollective.com/nixos/tiers/supporter/badge.svg?label=Supporters&color=brightgreen)](https://opencollective.com/nixos)
+[![Test](https://github.com/NixOS/nix/workflows/Test/badge.svg)](https://github.com/NixOS/nix/actions)
+
+Nix is a powerful package manager for Linux and other Unix systems that makes package
+management reliable and reproducible. Please refer to the [Nix manual](https://nixos.org/nix/manual)
+for more details.
+
+## Installation
+
+On Linux and macOS the easiest way to Install Nix is to run the following shell command
+(as a user other than root):
+
+```
+$ curl -L https://nixos.org/nix/install | sh
+```
+
+Information on additional installation methods is available on the [Nix download page](https://nixos.org/download.html).
+
+## Building And Developing
+
+### Building Nix
+
+You can build Nix using one of the targets provided by [release.nix](./release.nix):
+
+```
+$ nix-build ./release.nix -A build.aarch64-linux
+$ nix-build ./release.nix -A build.x86_64-darwin
+$ nix-build ./release.nix -A build.i686-linux
+$ nix-build ./release.nix -A build.x86_64-linux
+```
-Nix, the purely functional package manager
-------------------------------------------
+### Development Environment
-Nix is a new take on package management that is fairly unique. Because of its
-purity aspects, a lot of issues found in traditional package managers don't
-appear with Nix.
+You can use the provided `shell.nix` to get a working development environment:
-To find out more about the tool, usage and installation instructions, please
-read the manual, which is available on the Nix website at
-<https://nixos.org/nix/manual>.
+```
+$ nix-shell
+$ ./bootstrap.sh
+$ ./configure
+$ make
+```
-## Contributing
+## Additional Resources
-Take a look at the [Hacking Section](https://nixos.org/nix/manual/#chap-hacking)
-of the manual. It helps you to get started with building Nix from source.
+- [Nix manual](https://nixos.org/nix/manual)
+- [Nix jobsets on hydra.nixos.org](https://hydra.nixos.org/project/nix)
+- [NixOS Discourse](https://discourse.nixos.org/)
+- [IRC - #nixos on freenode.net](irc://irc.freenode.net/#nixos)
## License
-Nix is released under the LGPL v2.1
+Nix is released under the [LGPL v2.1](./COPYING).
diff --git a/configure.ac b/configure.ac
index b868343d2..c3007b4b6 100644
--- a/configure.ac
+++ b/configure.ac
@@ -266,6 +266,10 @@ if test "$gc" = yes; then
fi
+# Look for gtest.
+PKG_CHECK_MODULES([GTEST], [gtest_main])
+
+
# documentation generation switch
AC_ARG_ENABLE(doc-gen, AC_HELP_STRING([--disable-doc-gen],
[disable documentation generation]),
diff --git a/doc/manual/advanced-topics/post-build-hook.xml b/doc/manual/advanced-topics/post-build-hook.xml
index 08a7a772f..acfe9e3cc 100644
--- a/doc/manual/advanced-topics/post-build-hook.xml
+++ b/doc/manual/advanced-topics/post-build-hook.xml
@@ -61,7 +61,7 @@ substituters = https://cache.nixos.org/ s3://example-nix-cache
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= example-nix-cache-1:1/cKDz3QCCOmwcztD2eV6Coggp6rqc9DGjWv7C0G+rM=
</programlisting>
-<para>we will restart the Nix daemon a later step.</para>
+<para>We will restart the Nix daemon in a later step.</para>
</section>
<section>
@@ -139,7 +139,7 @@ $ nix-store --delete /nix/store/ibcyipq5gf91838ldx40mjsp0b8w9n18-example
<para>Now, copy the path back from the cache:</para>
<screen>
-$ nix store --realize /nix/store/ibcyipq5gf91838ldx40mjsp0b8w9n18-example
+$ nix-store --realise /nix/store/ibcyipq5gf91838ldx40mjsp0b8w9n18-example
copying path '/nix/store/m8bmqwrch6l3h8s0k3d673xpmipcdpsa-example from 's3://example-nix-cache'...
warning: you did not specify '--add-root'; the result might be removed by the garbage collector
/nix/store/m8bmqwrch6l3h8s0k3d673xpmipcdpsa-example
diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml
index 48dce7c95..1820598e5 100644
--- a/doc/manual/command-ref/conf-file.xml
+++ b/doc/manual/command-ref/conf-file.xml
@@ -19,26 +19,30 @@
<refsection><title>Description</title>
-<para>Nix reads settings from two configuration files:</para>
+<para>By default Nix reads settings from the following places:</para>
+
+<para>The system-wide configuration file
+<filename><replaceable>sysconfdir</replaceable>/nix/nix.conf</filename>
+(i.e. <filename>/etc/nix/nix.conf</filename> on most systems), or
+<filename>$NIX_CONF_DIR/nix.conf</filename> if
+<envar>NIX_CONF_DIR</envar> is set. Values loaded in this file are not forwarded to the Nix daemon. The
+client assumes that the daemon has already loaded them.
+</para>
-<itemizedlist>
+<para>User-specific configuration files:</para>
- <listitem>
- <para>The system-wide configuration file
- <filename><replaceable>sysconfdir</replaceable>/nix/nix.conf</filename>
- (i.e. <filename>/etc/nix/nix.conf</filename> on most systems), or
- <filename>$NIX_CONF_DIR/nix.conf</filename> if
- <envar>NIX_CONF_DIR</envar> is set.</para>
- </listitem>
+<para>
+ If <envar>NIX_USER_CONF_FILES</envar> is set, then each path separated by
+ <literal>:</literal> will be loaded in reverse order.
+</para>
- <listitem>
- <para>The user configuration file
- <filename>$XDG_CONFIG_HOME/nix/nix.conf</filename>, or
- <filename>~/.config/nix/nix.conf</filename> if
- <envar>XDG_CONFIG_HOME</envar> is not set.</para>
- </listitem>
+<para>
+ Otherwise it will look for <filename>nix/nix.conf</filename> files in
+ <envar>XDG_CONFIG_DIRS</envar> and <envar>XDG_CONFIG_HOME</envar>.
-</itemizedlist>
+ The default location is <filename>$HOME/.config/nix.conf</filename> if
+ those environment variables are unset.
+</para>
<para>The configuration files consist of
<literal><replaceable>name</replaceable> =
diff --git a/doc/manual/command-ref/env-common.xml b/doc/manual/command-ref/env-common.xml
index 696d68c34..0217de7b2 100644
--- a/doc/manual/command-ref/env-common.xml
+++ b/doc/manual/command-ref/env-common.xml
@@ -33,7 +33,7 @@
will cause Nix to look for paths relative to
<filename>/home/eelco/Dev</filename> and
- <filename>/etc/nixos</filename>, in that order. It is also
+ <filename>/etc/nixos</filename>, in this order. It is also
possible to match paths against a prefix. For example, the value
<screen>
@@ -59,7 +59,7 @@ nixpkgs=https://github.com/NixOS/nixpkgs-channels/archive/nixos-15.09.tar.gz</sc
15.09 channel.</para>
<para>A following shorthand can be used to refer to the official channels:
-
+
<screen>nixpkgs=channel:nixos-15.09</screen>
</para>
@@ -137,12 +137,19 @@ $ mount -o bind /mnt/otherdisk/nix /nix</screen>
<varlistentry><term><envar>NIX_CONF_DIR</envar></term>
- <listitem><para>Overrides the location of the Nix configuration
+ <listitem><para>Overrides the location of the system Nix configuration
directory (default
<filename><replaceable>prefix</replaceable>/etc/nix</filename>).</para></listitem>
</varlistentry>
+<varlistentry><term><envar>NIX_USER_CONF_FILES</envar></term>
+
+ <listitem><para>Overrides the location of the user Nix configuration files
+ to load from (defaults to the XDG spec locations). The variable is treated
+ as a list separated by the <literal>:</literal> token.</para></listitem>
+
+</varlistentry>
<varlistentry><term><envar>TMPDIR</envar></term>
diff --git a/doc/manual/command-ref/nix-store.xml b/doc/manual/command-ref/nix-store.xml
index 1ddb5408d..d71f9d8e4 100644
--- a/doc/manual/command-ref/nix-store.xml
+++ b/doc/manual/command-ref/nix-store.xml
@@ -936,7 +936,7 @@ $ nix-store --add ./foo.c
<para>The operation <option>--add-fixed</option> adds the specified paths to
the Nix store. Unlike <option>--add</option> paths are registered using the
-specified hashing algorithm, resulting in the same output path as a fixed output
+specified hashing algorithm, resulting in the same output path as a fixed-output
derivation. This can be used for sources that are not available from a public
url or broke since the download expression was written.
</para>
diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml
index 394e1fc32..f71a8f3be 100644
--- a/doc/manual/expressions/builtins.xml
+++ b/doc/manual/expressions/builtins.xml
@@ -422,6 +422,16 @@ stdenv.mkDerivation { … }
</para>
</listitem>
</varlistentry>
+ <varlistentry>
+ <term>submodules</term>
+ <listitem>
+ <para>
+ A Boolean parameter that specifies whether submodules
+ should be checked out. Defaults to
+ <literal>false</literal>.
+ </para>
+ </listitem>
+ </varlistentry>
</variablelist>
<example>
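
The `submodules` parameter documented above is exercised by the new tests/fetchGitSubmodules.sh listed in the diffstat. A minimal Nix sketch of its use; the repository URL and ref are placeholders, not taken from this change:

    # Sketch only: fetch a Git repository together with its submodules.
    # URL and ref are placeholders.
    builtins.fetchGit {
      url = "https://example.org/some/repo.git";
      ref = "master";
      submodules = true;   # also check out submodules; the documented default is false
    }
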
diff --git a/doc/manual/installation/upgrading.xml b/doc/manual/installation/upgrading.xml
index 30670d7fe..592f63895 100644
--- a/doc/manual/installation/upgrading.xml
+++ b/doc/manual/installation/upgrading.xml
@@ -17,6 +17,11 @@
<para>
Single-user installations of Nix should run this:
- <command>nix-channel --update; nix-env -iA nixpkgs.nix</command>
+ <command>nix-channel --update; nix-env -iA nixpkgs.nix nixpkgs.cacert</command>
+ </para>
+
+ <para>
+ Multi-user Nix users on Linux should run this with sudo:
+ <command>nix-channel --update; nix-env -iA nixpkgs.nix nixpkgs.cacert; systemctl daemon-reload; systemctl restart nix-daemon</command>
</para>
</chapter>
diff --git a/doc/manual/release-notes/rl-0.8.xml b/doc/manual/release-notes/rl-0.8.xml
index 784b26c6b..825798fa9 100644
--- a/doc/manual/release-notes/rl-0.8.xml
+++ b/doc/manual/release-notes/rl-0.8.xml
@@ -8,7 +8,7 @@
<para>NOTE: the hashing scheme in Nix 0.8 changed (as detailed below).
As a result, <command>nix-pull</command> manifests and channels built
-for Nix 0.7 and below will now work anymore. However, the Nix
+for Nix 0.7 and below will not work anymore. However, the Nix
expression language has not changed, so you can still build from
source. Also, existing user environments continue to work. Nix 0.8
will automatically upgrade the database schema of previous
diff --git a/maintainers/upload-release.pl b/maintainers/upload-release.pl
index 77534babb..cb584d427 100755
--- a/maintainers/upload-release.pl
+++ b/maintainers/upload-release.pl
@@ -1,5 +1,5 @@
#! /usr/bin/env nix-shell
-#! nix-shell -i perl -p perl perlPackages.LWPUserAgent perlPackages.LWPProtocolHttps perlPackages.FileSlurp gnupg1
+#! nix-shell -i perl -p perl perlPackages.LWPUserAgent perlPackages.LWPProtocolHttps perlPackages.FileSlurp perlPackages.NetAmazonS3 gnupg1
use strict;
use Data::Dumper;
@@ -9,12 +9,16 @@ use File::Slurp;
use File::Copy;
use JSON::PP;
use LWP::UserAgent;
+use Net::Amazon::S3;
my $evalId = $ARGV[0] or die "Usage: $0 EVAL-ID\n";
-my $releasesDir = "/home/eelco/mnt/releases";
+my $releasesBucketName = "nix-releases";
+my $channelsBucketName = "nix-channels";
my $nixpkgsDir = "/home/eelco/Dev/nixpkgs-pristine";
+my $TMPDIR = $ENV{'TMPDIR'} // "/tmp";
+
# FIXME: cut&paste from nixos-channel-scripts.
sub fetch {
my ($url, $type) = @_;
@@ -42,13 +46,31 @@ my $version = $1;
print STDERR "Nix revision is $nixRev, version is $version\n";
-File::Path::make_path($releasesDir);
-if (system("mountpoint -q $releasesDir") != 0) {
- system("sshfs hydra-mirror\@nixos.org:/releases $releasesDir") == 0 or die;
-}
+my $releaseDir = "nix/$releaseName";
+
+my $tmpDir = "$TMPDIR/nix-release/$releaseName";
+File::Path::make_path($tmpDir);
+
+# S3 setup.
+my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die "No AWS_ACCESS_KEY_ID given.";
+my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die "No AWS_SECRET_ACCESS_KEY given.";
+
+my $s3 = Net::Amazon::S3->new(
+ { aws_access_key_id => $aws_access_key_id,
+ aws_secret_access_key => $aws_secret_access_key,
+ retry => 1,
+ host => "s3-eu-west-1.amazonaws.com",
+ });
+
+my $releasesBucket = $s3->bucket($releasesBucketName) or die;
+
+my $s3_us = Net::Amazon::S3->new(
+ { aws_access_key_id => $aws_access_key_id,
+ aws_secret_access_key => $aws_secret_access_key,
+ retry => 1,
+ });
-my $releaseDir = "$releasesDir/nix/$releaseName";
-File::Path::make_path($releaseDir);
+my $channelsBucket = $s3_us->bucket($channelsBucketName) or die;
sub downloadFile {
my ($jobName, $productNr, $dstName) = @_;
@@ -57,40 +79,49 @@ sub downloadFile {
my $srcFile = $buildInfo->{buildproducts}->{$productNr}->{path} or die "job '$jobName' lacks product $productNr\n";
$dstName //= basename($srcFile);
- my $dstFile = "$releaseDir/" . $dstName;
+ my $tmpFile = "$tmpDir/$dstName";
- if (! -e $dstFile) {
- print STDERR "downloading $srcFile to $dstFile...\n";
- system("NIX_REMOTE=https://cache.nixos.org/ nix cat-store '$srcFile' > '$dstFile.tmp'") == 0
+ if (!-e $tmpFile) {
+ print STDERR "downloading $srcFile to $tmpFile...\n";
+ system("NIX_REMOTE=https://cache.nixos.org/ nix cat-store '$srcFile' > '$tmpFile'") == 0
or die "unable to fetch $srcFile\n";
- rename("$dstFile.tmp", $dstFile) or die;
}
my $sha256_expected = $buildInfo->{buildproducts}->{$productNr}->{sha256hash} or die;
- my $sha256_actual = `nix hash-file --base16 --type sha256 '$dstFile'`;
+ my $sha256_actual = `nix hash-file --base16 --type sha256 '$tmpFile'`;
chomp $sha256_actual;
if ($sha256_expected ne $sha256_actual) {
- print STDERR "file $dstFile is corrupt, got $sha256_actual, expected $sha256_expected\n";
+ print STDERR "file $tmpFile is corrupt, got $sha256_actual, expected $sha256_expected\n";
exit 1;
}
- write_file("$dstFile.sha256", $sha256_expected);
+ write_file("$tmpFile.sha256", $sha256_expected);
- if (! -e "$dstFile.asc") {
- system("gpg2 --detach-sign --armor $dstFile") == 0 or die "unable to sign $dstFile\n";
+ if (! -e "$tmpFile.asc") {
+ system("gpg2 --detach-sign --armor $tmpFile") == 0 or die "unable to sign $tmpFile\n";
}
- return ($dstFile, $sha256_expected);
+ return $sha256_expected;
}
downloadFile("tarball", "2"); # .tar.bz2
-my ($tarball, $tarballHash) = downloadFile("tarball", "3"); # .tar.xz
+my $tarballHash = downloadFile("tarball", "3"); # .tar.xz
downloadFile("binaryTarball.i686-linux", "1");
downloadFile("binaryTarball.x86_64-linux", "1");
downloadFile("binaryTarball.aarch64-linux", "1");
downloadFile("binaryTarball.x86_64-darwin", "1");
downloadFile("installerScript", "1");
+for my $fn (glob "$tmpDir/*") {
+ my $name = basename($fn);
+ my $dstKey = "$releaseDir/" . $name;
+ unless (defined $releasesBucket->head_key($dstKey)) {
+ print STDERR "uploading $fn to s3://$releasesBucketName/$dstKey...\n";
+ $releasesBucket->add_key_filename($dstKey, $fn)
+ or die $releasesBucket->err . ": " . $releasesBucket->errstr;
+ }
+}
+
exit if $version =~ /pre/;
# Update Nixpkgs in a very hacky way.
@@ -125,18 +156,11 @@ write_file("$nixpkgsDir/nixos/modules/installer/tools/nix-fallback-paths.nix",
system("cd $nixpkgsDir && git commit -a -m 'nix: $oldName -> $version'") == 0 or die;
-# Extract the HTML manual.
-File::Path::make_path("$releaseDir/manual");
-
-system("tar xvf $tarball --strip-components=3 -C $releaseDir/manual --wildcards '*/doc/manual/*.html' '*/doc/manual/*.css' '*/doc/manual/*.gif' '*/doc/manual/*.png'") == 0 or die;
-
-if (! -e "$releaseDir/manual/index.html") {
- symlink("manual.html", "$releaseDir/manual/index.html") or die;
-}
-
# Update the "latest" symlink.
-symlink("$releaseName", "$releasesDir/nix/latest-tmp") or die;
-rename("$releasesDir/nix/latest-tmp", "$releasesDir/nix/latest") or die;
+$channelsBucket->add_key(
+ "nix-latest/install", "",
+ { "x-amz-website-redirect-location" => "https://releases.nixos.org/$releaseDir/install" })
+ or die $channelsBucket->err . ": " . $channelsBucket->errstr;
# Tag the release in Git.
chdir("/home/eelco/Dev/nix-pristine") or die;
diff --git a/mk/programs.mk b/mk/programs.mk
index d93df4468..3fa9685c3 100644
--- a/mk/programs.mk
+++ b/mk/programs.mk
@@ -35,24 +35,28 @@ define build-program
$$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE))
$(1)_INSTALL_DIR ?= $$(bindir)
- $(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$(1)
- $$(eval $$(call create-dir, $$($(1)_INSTALL_DIR)))
+ ifdef $(1)_INSTALL_DIR
- install: $(DESTDIR)$$($(1)_INSTALL_PATH)
+ $(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$(1)
- ifeq ($(BUILD_SHARED_LIBS), 1)
+ $$(eval $$(call create-dir, $$($(1)_INSTALL_DIR)))
- _libs_final := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_INSTALL_PATH))
+ install: $(DESTDIR)$$($(1)_INSTALL_PATH)
- $(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
+ ifeq ($(BUILD_SHARED_LIBS), 1)
+
+ _libs_final := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_INSTALL_PATH))
+
+ $(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
$$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED))
- else
+ else
- $(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_PATH) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
+ $(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_PATH) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
install -t $(DESTDIR)$$($(1)_INSTALL_DIR) $$<
+ endif
endif
# Propagate CFLAGS and CXXFLAGS to the individual object files.
@@ -76,4 +80,10 @@ define build-program
programs-list += $$($(1)_PATH)
clean-files += $$($(1)_PATH) $$(_d)/*.o $$(_d)/.*.dep $$($(1)_DEPS) $$($(1)_OBJS)
dist-files += $$(_srcs)
+
+ # Phony target to run this program (typically as a dependency of 'check').
+ .PHONY: $(1)_RUN
+ $(1)_RUN: $$($(1)_PATH)
+ $(trace-test) $$($(1)_PATH)
+
endef
diff --git a/mk/tracing.mk b/mk/tracing.mk
index 13912d3c7..54c77ab60 100644
--- a/mk/tracing.mk
+++ b/mk/tracing.mk
@@ -11,6 +11,7 @@ ifeq ($(V), 0)
trace-javac = @echo " JAVAC " $@;
trace-jar = @echo " JAR " $@;
trace-mkdir = @echo " MKDIR " $@;
+ trace-test = @echo " TEST " $@;
suppress = @
diff --git a/nix-rust/local.mk b/nix-rust/local.mk
index 1e006e500..e4bfde31b 100644
--- a/nix-rust/local.mk
+++ b/nix-rust/local.mk
@@ -41,5 +41,5 @@ ifneq ($(OS), Darwin)
check: rust-tests
rust-tests:
- cd nix-rust && CARGO_HOME=$$(if [[ -d vendor ]]; then echo vendor; fi) cargo test --release $$(if [[ -d vendor ]]; then echo --offline; fi)
+ $(trace-test) cd nix-rust && CARGO_HOME=$$(if [[ -d vendor ]]; then echo vendor; fi) cargo test --release $$(if [[ -d vendor ]]; then echo --offline; fi)
endif
diff --git a/precompiled-headers.h b/precompiled-headers.h
index e0d885b23..079aa496e 100644
--- a/precompiled-headers.h
+++ b/precompiled-headers.h
@@ -56,6 +56,3 @@
#include <sys/wait.h>
#include <termios.h>
#include <unistd.h>
-
-#include "util.hh"
-#include "args.hh"
diff --git a/release-common.nix b/release-common.nix
index b6c8f3d81..7e7de005d 100644
--- a/release-common.nix
+++ b/release-common.nix
@@ -55,6 +55,7 @@ rec {
# Tests
git
mercurial
+ gmock
]
++ lib.optionals stdenv.isLinux [libseccomp utillinuxMinimal]
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
diff --git a/release.nix b/release.nix
index 65b035957..f5729cee3 100644
--- a/release.nix
+++ b/release.nix
@@ -241,6 +241,11 @@ let
src = nix;
+ preConfigure =
+ ''
+ ln -sfn ${vendoredCrates'}/vendor/ nix-rust/vendor
+ '';
+
enableParallelBuilding = true;
buildInputs = buildDeps ++ propagatedDeps;
diff --git a/src/libexpr/common-eval-args.cc b/src/libexpr/common-eval-args.cc
index 13950ab8d..44baadd53 100644
--- a/src/libexpr/common-eval-args.cc
+++ b/src/libexpr/common-eval-args.cc
@@ -1,31 +1,36 @@
#include "common-eval-args.hh"
#include "shared.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "util.hh"
#include "eval.hh"
+#include "fetchers.hh"
+#include "store-api.hh"
namespace nix {
MixEvalArgs::MixEvalArgs()
{
- mkFlag()
- .longName("arg")
- .description("argument to be passed to Nix functions")
- .labels({"name", "expr"})
- .handler([&](std::vector<std::string> ss) { autoArgs[ss[0]] = 'E' + ss[1]; });
+ addFlag({
+ .longName = "arg",
+ .description = "argument to be passed to Nix functions",
+ .labels = {"name", "expr"},
+ .handler = {[&](std::string name, std::string expr) { autoArgs[name] = 'E' + expr; }}
+ });
- mkFlag()
- .longName("argstr")
- .description("string-valued argument to be passed to Nix functions")
- .labels({"name", "string"})
- .handler([&](std::vector<std::string> ss) { autoArgs[ss[0]] = 'S' + ss[1]; });
+ addFlag({
+ .longName = "argstr",
+ .description = "string-valued argument to be passed to Nix functions",
+ .labels = {"name", "string"},
+ .handler = {[&](std::string name, std::string s) { autoArgs[name] = 'S' + s; }},
+ });
- mkFlag()
- .shortName('I')
- .longName("include")
- .description("add a path to the list of locations used to look up <...> file names")
- .label("path")
- .handler([&](std::string s) { searchPath.push_back(s); });
+ addFlag({
+ .longName = "include",
+ .shortName = 'I',
+ .description = "add a path to the list of locations used to look up <...> file names",
+ .labels = {"path"},
+ .handler = {[&](std::string s) { searchPath.push_back(s); }}
+ });
}
Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
@@ -46,9 +51,9 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
Path lookupFileArg(EvalState & state, string s)
{
if (isUri(s)) {
- CachedDownloadRequest request(s);
- request.unpack = true;
- return getDownloader()->downloadCached(state.store, request).path;
+ return state.store->toRealPath(
+ fetchers::downloadTarball(
+ state.store, resolveUri(s), "source", false).storePath);
} else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
Path p = s.substr(1, s.size() - 2);
return state.findFile(p);
diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh
index d03633cc7..e6b838665 100644
--- a/src/libexpr/eval-inline.hh
+++ b/src/libexpr/eval-inline.hh
@@ -65,7 +65,7 @@ inline void EvalState::forceAttrs(Value & v)
inline void EvalState::forceAttrs(Value & v, const Pos & pos)
{
- forceValue(v);
+ forceValue(v, pos);
if (v.type != tAttrs)
throwTypeError("value is %1% while a set was expected", v, pos);
}
@@ -81,7 +81,7 @@ inline void EvalState::forceList(Value & v)
inline void EvalState::forceList(Value & v, const Pos & pos)
{
- forceValue(v);
+ forceValue(v, pos);
if (!v.isList())
throwTypeError("value is %1% while a list was expected", v, pos);
}
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index a19b85be4..65071206f 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -5,7 +5,7 @@
#include "derivations.hh"
#include "globals.hh"
#include "eval-inline.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "json.hh"
#include "function-trace.hh"
@@ -22,6 +22,8 @@
#if HAVE_BOEHMGC
+#define GC_INCLUDE_NEW
+
#include <gc/gc.h>
#include <gc/gc_cpp.h>
@@ -56,6 +58,12 @@ static char * dupStringWithLen(const char * s, size_t size)
}
+RootValue allocRootValue(Value * v)
+{
+ return std::allocate_shared<Value *>(traceable_allocator<Value *>(), v);
+}
+
+
static void printValue(std::ostream & str, std::set<const Value *> & active, const Value & v)
{
checkInterrupt();
@@ -1280,7 +1288,7 @@ void ExprWith::eval(EvalState & state, Env & env, Value & v)
void ExprIf::eval(EvalState & state, Env & env, Value & v)
{
- (state.evalBool(env, cond) ? then : else_)->eval(state, env, v);
+ (state.evalBool(env, cond, pos) ? then : else_)->eval(state, env, v);
}
@@ -1526,7 +1534,7 @@ NixFloat EvalState::forceFloat(Value & v, const Pos & pos)
bool EvalState::forceBool(Value & v, const Pos & pos)
{
- forceValue(v);
+ forceValue(v, pos);
if (v.type != tBool)
throwTypeError("value is %1% while a Boolean was expected", v, pos);
return v.boolean;
@@ -1541,7 +1549,7 @@ bool EvalState::isFunctor(Value & fun)
void EvalState::forceFunction(Value & v, const Pos & pos)
{
- forceValue(v);
+ forceValue(v, pos);
if (v.type != tLambda && v.type != tPrimOp && v.type != tPrimOpApp && !isFunctor(v))
throwTypeError("value is %1% while a function was expected", v, pos);
}
@@ -1618,7 +1626,7 @@ std::optional<string> EvalState::tryAttrsToString(const Pos & pos, Value & v,
string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context,
bool coerceMore, bool copyToStore)
{
- forceValue(v);
+ forceValue(v, pos);
string s;
diff --git a/src/libexpr/json-to-value.cc b/src/libexpr/json-to-value.cc
index 1fdce1983..76e1a26bf 100644
--- a/src/libexpr/json-to-value.cc
+++ b/src/libexpr/json-to-value.cc
@@ -4,7 +4,6 @@
#include <nlohmann/json.hpp>
using json = nlohmann::json;
-using std::unique_ptr;
namespace nix {
@@ -13,69 +12,69 @@ namespace nix {
class JSONSax : nlohmann::json_sax<json> {
class JSONState {
protected:
- unique_ptr<JSONState> parent;
- Value * v;
+ std::unique_ptr<JSONState> parent;
+ RootValue v;
public:
- virtual unique_ptr<JSONState> resolve(EvalState &)
+ virtual std::unique_ptr<JSONState> resolve(EvalState &)
{
throw std::logic_error("tried to close toplevel json parser state");
- };
- explicit JSONState(unique_ptr<JSONState>&& p) : parent(std::move(p)), v(nullptr) {};
- explicit JSONState(Value* v) : v(v) {};
- JSONState(JSONState& p) = delete;
- Value& value(EvalState & state)
+ }
+ explicit JSONState(std::unique_ptr<JSONState> && p) : parent(std::move(p)) {}
+ explicit JSONState(Value * v) : v(allocRootValue(v)) {}
+ JSONState(JSONState & p) = delete;
+ Value & value(EvalState & state)
{
- if (v == nullptr)
- v = state.allocValue();
- return *v;
- };
- virtual ~JSONState() {};
- virtual void add() {};
+ if (!v)
+ v = allocRootValue(state.allocValue());
+ return **v;
+ }
+ virtual ~JSONState() {}
+ virtual void add() {}
};
class JSONObjectState : public JSONState {
using JSONState::JSONState;
- ValueMap attrs = ValueMap();
- virtual unique_ptr<JSONState> resolve(EvalState & state) override
+ ValueMap attrs;
+ std::unique_ptr<JSONState> resolve(EvalState & state) override
{
- Value& v = parent->value(state);
+ Value & v = parent->value(state);
state.mkAttrs(v, attrs.size());
for (auto & i : attrs)
v.attrs->push_back(Attr(i.first, i.second));
return std::move(parent);
}
- virtual void add() override { v = nullptr; };
+ void add() override { v = nullptr; }
public:
- void key(string_t& name, EvalState & state)
+ void key(string_t & name, EvalState & state)
{
- attrs[state.symbols.create(name)] = &value(state);
+ attrs.insert_or_assign(state.symbols.create(name), &value(state));
}
};
class JSONListState : public JSONState {
- ValueVector values = ValueVector();
- virtual unique_ptr<JSONState> resolve(EvalState & state) override
+ ValueVector values;
+ std::unique_ptr<JSONState> resolve(EvalState & state) override
{
- Value& v = parent->value(state);
+ Value & v = parent->value(state);
state.mkList(v, values.size());
for (size_t n = 0; n < values.size(); ++n) {
v.listElems()[n] = values[n];
}
return std::move(parent);
}
- virtual void add() override {
- values.push_back(v);
+ void add() override {
+ values.push_back(*v);
v = nullptr;
- };
+ }
public:
- JSONListState(unique_ptr<JSONState>&& p, std::size_t reserve) : JSONState(std::move(p))
+ JSONListState(std::unique_ptr<JSONState> && p, std::size_t reserve) : JSONState(std::move(p))
{
values.reserve(reserve);
}
};
EvalState & state;
- unique_ptr<JSONState> rs;
+ std::unique_ptr<JSONState> rs;
template<typename T, typename... Args> inline bool handle_value(T f, Args... args)
{
@@ -107,12 +106,12 @@ public:
return handle_value(mkInt, val);
}
- bool number_float(number_float_t val, const string_t& s)
+ bool number_float(number_float_t val, const string_t & s)
{
return handle_value(mkFloat, val);
}
- bool string(string_t& val)
+ bool string(string_t & val)
{
return handle_value<void(Value&, const char*)>(mkString, val.c_str());
}
@@ -123,7 +122,7 @@ public:
return true;
}
- bool key(string_t& name)
+ bool key(string_t & name)
{
dynamic_cast<JSONObjectState*>(rs.get())->key(name, state);
return true;
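
The SAX parser reworked above (now rooting intermediate values through RootValue) is the code path behind builtins.fromJSON. As a reminder of the mapping it implements, a small illustration that is not part of the diff:

    # JSON objects become attribute sets, arrays become lists:
    builtins.fromJSON ''{ "name": "example", "ids": [ 1, 2, 3 ], "ok": true }''
    # => { ids = [ 1 2 3 ]; name = "example"; ok = true; }
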
diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk
index a4ccab376..917e8a1c7 100644
--- a/src/libexpr/local.mk
+++ b/src/libexpr/local.mk
@@ -6,9 +6,9 @@ libexpr_DIR := $(d)
libexpr_SOURCES := $(wildcard $(d)/*.cc) $(wildcard $(d)/primops/*.cc) $(d)/lexer-tab.cc $(d)/parser-tab.cc
-libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libmain -I src/libexpr
+libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libmain -I src/libexpr
-libexpr_LIBS = libutil libstore libnixrust
+libexpr_LIBS = libutil libstore libfetchers libnixrust
libexpr_LDFLAGS =
ifneq ($(OS), FreeBSD)
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index a8bae0a61..137fe7198 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -209,9 +209,10 @@ struct ExprList : Expr
struct Formal
{
+ Pos pos;
Symbol name;
Expr * def;
- Formal(const Symbol & name, Expr * def) : name(name), def(def) { };
+ Formal(const Pos & pos, const Symbol & name, Expr * def) : pos(pos), name(name), def(def) { };
};
struct Formals
@@ -260,8 +261,9 @@ struct ExprWith : Expr
struct ExprIf : Expr
{
+ Pos pos;
Expr * cond, * then, * else_;
- ExprIf(Expr * cond, Expr * then, Expr * else_) : cond(cond), then(then), else_(else_) { };
+ ExprIf(const Pos & pos, Expr * cond, Expr * then, Expr * else_) : pos(pos), cond(cond), then(then), else_(else_) { };
COMMON_METHODS
};
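
The positions attached to Formal and ExprIf above become visible from the language itself: a non-Boolean `if` condition is now reported at the position of the `if`, and the attributes returned by builtins.functionArgs carry the positions of the formal arguments (compare the new tests/lang/eval-okay-getattrpos-functionargs test in the diffstat). Two small illustrations, not taken from the diff:

    # Evaluating this now yields a type error that points at the `if`:
    if "not a boolean" then 1 else 2

    # The formal argument positions can be read back with unsafeGetAttrPos:
    builtins.unsafeGetAttrPos "x" (builtins.functionArgs ({ x ? 1, y }: x))
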
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index a3ad54a3d..82d5753ab 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -334,7 +334,7 @@ expr_function
;
expr_if
- : IF expr THEN expr ELSE expr { $$ = new ExprIf($2, $4, $6); }
+ : IF expr THEN expr ELSE expr { $$ = new ExprIf(CUR_POS, $2, $4, $6); }
| expr_op
;
@@ -535,8 +535,8 @@ formals
;
formal
- : ID { $$ = new Formal(data->symbols.create($1), 0); }
- | ID '?' expr { $$ = new Formal(data->symbols.create($1), $3); }
+ : ID { $$ = new Formal(CUR_POS, data->symbols.create($1), 0); }
+ | ID '?' expr { $$ = new Formal(CUR_POS, data->symbols.create($1), $3); }
;
%%
@@ -548,7 +548,8 @@ formal
#include <unistd.h>
#include "eval.hh"
-#include "download.hh"
+#include "filetransfer.hh"
+#include "fetchers.hh"
#include "store-api.hh"
@@ -692,16 +693,10 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
if (isUri(elem.second)) {
try {
- CachedDownloadRequest request(elem.second);
- request.unpack = true;
- res = { true, getDownloader()->downloadCached(store, request).path };
- } catch (DownloadError & e) {
- logWarning(
- ErrorInfo {
- .name = "Download Error",
- .hint = hintfmt("warning: Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second)
- });
-
+ res = { true, store->toRealPath(fetchers::downloadTarball(
+ store, resolveUri(elem.second), "source", false).storePath) };
+ } catch (FileTransferError & e) {
+ printError("warning: Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second);
res = { false, "" };
}
} else {
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 771136af9..10e8c6b42 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -1,6 +1,5 @@
#include "archive.hh"
#include "derivations.hh"
-#include "download.hh"
#include "eval-inline.hh"
#include "eval.hh"
#include "globals.hh"
@@ -127,16 +126,16 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
}
w.attrs->sort();
- static Value * fun = nullptr;
+ static RootValue fun;
if (!fun) {
- fun = state.allocValue();
+ fun = allocRootValue(state.allocValue());
state.eval(state.parseExprFromString(
#include "imported-drv-to-derivation.nix.gen.hh"
- , "/"), *fun);
+ , "/"), **fun);
}
- state.forceFunction(*fun, pos);
- mkApp(v, *fun, w);
+ state.forceFunction(**fun, pos);
+ mkApp(v, **fun, w);
state.forceAttrs(v, pos);
} else {
state.forceAttrs(*args[0]);
@@ -259,7 +258,7 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
/* Return a string representing the type of the expression. */
static void prim_typeOf(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
string t;
switch (args[0]->type) {
case tInt: t = "int"; break;
@@ -287,7 +286,7 @@ static void prim_typeOf(EvalState & state, const Pos & pos, Value * * args, Valu
/* Determine whether the argument is the null value. */
static void prim_isNull(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
mkBool(v, args[0]->type == tNull);
}
@@ -295,7 +294,7 @@ static void prim_isNull(EvalState & state, const Pos & pos, Value * * args, Valu
/* Determine whether the argument is a function. */
static void prim_isFunction(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
bool res;
switch (args[0]->type) {
case tLambda:
@@ -314,21 +313,21 @@ static void prim_isFunction(EvalState & state, const Pos & pos, Value * * args,
/* Determine whether the argument is an integer. */
static void prim_isInt(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
mkBool(v, args[0]->type == tInt);
}
/* Determine whether the argument is a float. */
static void prim_isFloat(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
mkBool(v, args[0]->type == tFloat);
}
/* Determine whether the argument is a string. */
static void prim_isString(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
mkBool(v, args[0]->type == tString);
}
@@ -336,14 +335,14 @@ static void prim_isString(EvalState & state, const Pos & pos, Value * * args, Va
/* Determine whether the argument is a Boolean. */
static void prim_isBool(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
mkBool(v, args[0]->type == tBool);
}
/* Determine whether the argument is a path. */
static void prim_isPath(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
mkBool(v, args[0]->type == tPath);
}
@@ -408,7 +407,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
.hint = hintfmt("attribute 'operator' required"),
.nixCode = NixCode { .errPos = pos }
});
- state.forceValue(*op->value);
+ state.forceValue(*op->value, pos);
/* Construct the closure by applying the operator to element of
`workSet', adding the result to `workSet', continuing until
@@ -431,7 +430,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
.hint = hintfmt("attribute 'key' required"),
.nixCode = NixCode { .errPos = pos }
});
- state.forceValue(*key->value);
+ state.forceValue(*key->value, pos);
if (!doneKeys.insert(key->value).second) continue;
res.push_back(e);
@@ -443,7 +442,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
/* Add the values returned by the operator to the work set. */
for (unsigned int n = 0; n < call.listSize(); ++n) {
- state.forceValue(*call.listElems()[n]);
+ state.forceValue(*call.listElems()[n], pos);
workSet.push_back(call.listElems()[n]);
}
}
@@ -475,7 +474,7 @@ static void prim_throw(EvalState & state, const Pos & pos, Value * * args, Value
static void prim_addErrorContext(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
try {
- state.forceValue(*args[1]);
+ state.forceValue(*args[1], pos);
v = *args[1];
} catch (Error & e) {
PathSet context;
@@ -491,7 +490,7 @@ static void prim_tryEval(EvalState & state, const Pos & pos, Value * * args, Val
{
state.mkAttrs(v, 2);
try {
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
v.attrs->push_back(Attr(state.sValue, args[0]));
mkBool(*state.allocAttr(v, state.symbols.create("success")), true);
} catch (AssertionError & e) {
@@ -513,8 +512,8 @@ static void prim_getEnv(EvalState & state, const Pos & pos, Value * * args, Valu
/* Evaluate the first argument, then return the second argument. */
static void prim_seq(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
- state.forceValue(*args[1]);
+ state.forceValue(*args[0], pos);
+ state.forceValue(*args[1], pos);
v = *args[1];
}
@@ -524,7 +523,7 @@ static void prim_seq(EvalState & state, const Pos & pos, Value * * args, Value &
static void prim_deepSeq(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceValueDeep(*args[0]);
- state.forceValue(*args[1]);
+ state.forceValue(*args[1], pos);
v = *args[1];
}
@@ -533,12 +532,12 @@ static void prim_deepSeq(EvalState & state, const Pos & pos, Value * * args, Val
return the second expression. Useful for debugging. */
static void prim_trace(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
if (args[0]->type == tString)
printError("trace: %1%", args[0]->string.s);
else
printError("trace: %1%", *args[0]);
- state.forceValue(*args[1]);
+ state.forceValue(*args[1], pos);
v = *args[1];
}
@@ -653,7 +652,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
try {
if (ignoreNulls) {
- state.forceValue(*i->value);
+ state.forceValue(*i->value, pos);
if (i->value->type == tNull) continue;
}
@@ -1124,8 +1123,10 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
throw EvalError(
ErrorInfo {
.hint = hintfmt(
- "in 'toFile': the file '%1%' cannot refer to derivation outputs", \
- name),
+ "in 'toFile': the file named '%1%' must not contain a reference "
+ "to a derivation but contains (%2%)"
+ name,
+ path),
.nixCode = NixCode { .errPos = pos }
});
refs.insert(state.store->parseStorePath(path));
@@ -1201,7 +1202,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
.nixCode = NixCode { .errPos = pos }
});
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
if (args[0]->type != tLambda)
throw TypeError(
ErrorInfo {
@@ -1239,7 +1240,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
} else if (attr.name == state.sName)
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else if (n == "filter") {
- state.forceValue(*attr.value);
+ state.forceValue(*attr.value, pos);
filterFun = attr.value;
} else if (n == "recursive")
recursive = state.forceBool(*attr.value, *attr.pos);
@@ -1324,7 +1325,7 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
// !!! add to stack trace?
if (state.countCalls && i->pos) state.attrSelects[*i->pos]++;
- state.forceValue(*i->value);
+ state.forceValue(*i->value, pos);
v = *i->value;
}
@@ -1354,7 +1355,7 @@ static void prim_hasAttr(EvalState & state, const Pos & pos, Value * * args, Val
/* Determine whether the argument is a set. */
static void prim_isAttrs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
mkBool(v, args[0]->type == tAttrs);
}
@@ -1489,7 +1490,7 @@ static void prim_catAttrs(EvalState & state, const Pos & pos, Value * * args, Va
*/
static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
if (args[0]->type != tLambda)
throw TypeError(
ErrorInfo {
@@ -1504,9 +1505,12 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
}
state.mkAttrs(v, args[0]->lambda.fun->formals->formals.size());
- for (auto & i : args[0]->lambda.fun->formals->formals)
+ for (auto & i : args[0]->lambda.fun->formals->formals) {
// !!! should optimise booleans (allocate only once)
- mkBool(*state.allocAttr(v, i.name), i.def);
+ Value * value = state.allocValue();
+ v.attrs->push_back(Attr(i.name, value, &i.pos));
+ mkBool(*value, i.def);
+ }
v.attrs->sort();
}
@@ -1537,7 +1541,7 @@ static void prim_mapAttrs(EvalState & state, const Pos & pos, Value * * args, Va
/* Determine whether the argument is a list. */
static void prim_isList(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
+ state.forceValue(*args[0], pos);
mkBool(v, args[0]->isList());
}
@@ -1551,8 +1555,7 @@ static void elemAt(EvalState & state, const Pos & pos, Value & list, int n, Valu
.hint = hintfmt("list index %1% is out of bounds", n),
.nixCode = NixCode { .errPos = pos }
});
-
- state.forceValue(*list.listElems()[n]);
+ state.forceValue(*list.listElems()[n], pos);
v = *list.listElems()[n];
}
@@ -1680,9 +1683,9 @@ static void prim_foldlStrict(EvalState & state, const Pos & pos, Value * * args,
vCur = n == args[2]->listSize() - 1 ? &v : state.allocValue();
state.callFunction(vTmp, *args[2]->listElems()[n], *vCur, pos);
}
- state.forceValue(v);
+ state.forceValue(v, pos);
} else {
- state.forceValue(*args[1]);
+ state.forceValue(*args[1], pos);
v = *args[1];
}
}
@@ -1752,7 +1755,7 @@ static void prim_sort(EvalState & state, const Pos & pos, Value * * args, Value
auto len = args[1]->listSize();
state.mkList(v, len);
for (unsigned int n = 0; n < len; ++n) {
- state.forceValue(*args[1]->listElems()[n]);
+ state.forceValue(*args[1]->listElems()[n], pos);
v.listElems()[n] = args[1]->listElems()[n];
}
@@ -1787,7 +1790,7 @@ static void prim_partition(EvalState & state, const Pos & pos, Value * * args, V
for (unsigned int n = 0; n < len; ++n) {
auto vElem = args[1]->listElems()[n];
- state.forceValue(*vElem);
+ state.forceValue(*vElem, pos);
Value res;
state.callFunction(*args[0], *vElem, res, pos);
if (state.forceBool(res, pos))
@@ -1928,8 +1931,8 @@ static void prim_bitXor(EvalState & state, const Pos & pos, Value * * args, Valu
static void prim_lessThan(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
- state.forceValue(*args[0]);
- state.forceValue(*args[1]);
+ state.forceValue(*args[0], pos);
+ state.forceValue(*args[1], pos);
CompareValues comp;
mkBool(v, comp(args[0], args[1]));
}
@@ -2250,77 +2253,6 @@ static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args
/*************************************************************
- * Networking
- *************************************************************/
-
-
-void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
- const string & who, bool unpack, const std::string & defaultName)
-{
- CachedDownloadRequest request("");
- request.unpack = unpack;
- request.name = defaultName;
-
- state.forceValue(*args[0]);
-
- if (args[0]->type == tAttrs) {
-
- state.forceAttrs(*args[0], pos);
-
- for (auto & attr : *args[0]->attrs) {
- string n(attr.name);
- if (n == "url")
- request.uri = state.forceStringNoCtx(*attr.value, *attr.pos);
- else if (n == "sha256")
- request.expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
- else if (n == "name")
- request.name = state.forceStringNoCtx(*attr.value, *attr.pos);
- else
- throw EvalError(
- ErrorInfo {
- .hint = hintfmt("unsupported argument '%1%' to '%2%'", attr.name, who),
- .nixCode = NixCode { .errPos = pos }
- });
-
- }
-
- if (request.uri.empty())
- throw EvalError(
- ErrorInfo {
- .hint = hintfmt("'url' argument required"),
- .nixCode = NixCode { .errPos = pos }
- });
-
- } else
- request.uri = state.forceStringNoCtx(*args[0], pos);
-
- state.checkURI(request.uri);
-
- if (evalSettings.pureEval && !request.expectedHash)
- throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who);
-
- auto res = getDownloader()->downloadCached(state.store, request);
-
- if (state.allowedPaths)
- state.allowedPaths->insert(res.path);
-
- mkString(v, res.storePath, PathSet({res.storePath}));
-}
-
-
-static void prim_fetchurl(EvalState & state, const Pos & pos, Value * * args, Value & v)
-{
- fetch(state, pos, args, v, "fetchurl", false, "");
-}
-
-
-static void prim_fetchTarball(EvalState & state, const Pos & pos, Value * * args, Value & v)
-{
- fetch(state, pos, args, v, "fetchTarball", true, "source");
-}
-
-
-/*************************************************************
* Primop registration
*************************************************************/
@@ -2502,10 +2434,6 @@ void EvalState::createBaseEnv()
addPrimOp("derivationStrict", 1, prim_derivationStrict);
addPrimOp("placeholder", 1, prim_placeholder);
- // Networking
- addPrimOp("__fetchurl", 1, prim_fetchurl);
- addPrimOp("fetchTarball", 1, prim_fetchTarball);
-
/* Add a wrapper around the derivation primop that computes the
`drvPath' and `outPath' attributes lazily. */
string path = canonPath(settings.nixDataDir + "/nix/corepkgs/derivation.nix", true);
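
The fetchurl/fetchTarball primops removed from this file are not dropped from the language; going by the diffstat they are presumably re-registered in the new src/libexpr/primops/fetchTree.cc on top of libfetchers. For reference, the attribute form that the deleted `fetch` helper accepted (url, sha256, name), including its rule that pure evaluation mode requires a sha256; the URL and hash below are placeholders, not from the diff:

    # Placeholder URL and hash; `name` defaults to "source" for fetchTarball.
    # The deleted helper required `sha256` when evaluating in pure mode.
    builtins.fetchTarball {
      url = "https://example.org/source.tar.gz";
      sha256 = "0000000000000000000000000000000000000000000000000000";
    }
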
diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh
index c790b30f6..05d0792ef 100644
--- a/src/libexpr/primops.hh
+++ b/src/libexpr/primops.hh
@@ -20,6 +20,7 @@ struct RegisterPrimOp
them. */
/* Load a ValueInitializer from a DSO and return whatever it initializes */
void prim_importNative(EvalState & state, const Pos & pos, Value * * args, Value & v);
+
/* Execute a program and parse its output */
void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v);
diff --git a/src/libexpr/primops/fetchGit.cc b/src/libexpr/primops/fetchGit.cc
index 68ffed513..52826b56c 100644
--- a/src/libexpr/primops/fetchGit.cc
+++ b/src/libexpr/primops/fetchGit.cc
@@ -1,203 +1,19 @@
#include "primops.hh"
#include "eval-inline.hh"
-#include "download.hh"
#include "store-api.hh"
-#include "pathlocks.hh"
#include "hash.hh"
-#include "tarfile.hh"
-
-#include <sys/time.h>
-
-#include <regex>
-
-#include <nlohmann/json.hpp>
-
-using namespace std::string_literals;
+#include "fetchers.hh"
+#include "url.hh"
namespace nix {
-struct GitInfo
-{
- Path storePath;
- std::string rev;
- std::string shortRev;
- uint64_t revCount = 0;
-};
-
-std::regex revRegex("^[0-9a-fA-F]{40}$");
-
-GitInfo exportGit(ref<Store> store, const std::string & uri,
- std::optional<std::string> ref, std::string rev,
- const std::string & name)
-{
- if (evalSettings.pureEval && rev == "")
- throw Error("in pure evaluation mode, 'fetchGit' requires a Git revision");
-
- if (!ref && rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.git")) {
-
- bool clean = true;
-
- try {
- runProgram("git", true, { "-C", uri, "diff-index", "--quiet", "HEAD", "--" });
- } catch (ExecError & e) {
- if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
- clean = false;
- }
-
- if (!clean) {
-
- /* This is an unclean working tree. So copy all tracked files. */
- GitInfo gitInfo;
- gitInfo.rev = "0000000000000000000000000000000000000000";
- gitInfo.shortRev = std::string(gitInfo.rev, 0, 7);
-
- auto files = tokenizeString<std::set<std::string>>(
- runProgram("git", true, { "-C", uri, "ls-files", "-z" }), "\0"s);
-
- PathFilter filter = [&](const Path & p) -> bool {
- assert(hasPrefix(p, uri));
- std::string file(p, uri.size() + 1);
-
- auto st = lstat(p);
-
- if (S_ISDIR(st.st_mode)) {
- auto prefix = file + "/";
- auto i = files.lower_bound(prefix);
- return i != files.end() && hasPrefix(*i, prefix);
- }
-
- return files.count(file);
- };
-
- gitInfo.storePath = store->printStorePath(store->addToStore("source", uri, true, htSHA256, filter));
-
- return gitInfo;
- }
-
- // clean working tree, but no ref or rev specified. Use 'HEAD'.
- rev = chomp(runProgram("git", true, { "-C", uri, "rev-parse", "HEAD" }));
- ref = "HEAD"s;
- }
-
- if (!ref) ref = "HEAD"s;
-
- if (rev != "" && !std::regex_match(rev, revRegex))
- throw Error("invalid Git revision '%s'", rev);
-
- deletePath(getCacheDir() + "/nix/git");
-
- Path cacheDir = getCacheDir() + "/nix/gitv2/" + hashString(htSHA256, uri).to_string(Base32, false);
-
- if (!pathExists(cacheDir)) {
- createDirs(dirOf(cacheDir));
- runProgram("git", true, { "init", "--bare", cacheDir });
- }
-
- Path localRefFile;
- if (ref->compare(0, 5, "refs/") == 0)
- localRefFile = cacheDir + "/" + *ref;
- else
- localRefFile = cacheDir + "/refs/heads/" + *ref;
-
- bool doFetch;
- time_t now = time(0);
- /* If a rev was specified, we need to fetch if it's not in the
- repo. */
- if (rev != "") {
- try {
- runProgram("git", true, { "-C", cacheDir, "cat-file", "-e", rev });
- doFetch = false;
- } catch (ExecError & e) {
- if (WIFEXITED(e.status)) {
- doFetch = true;
- } else {
- throw;
- }
- }
- } else {
- /* If the local ref is older than ‘tarball-ttl’ seconds, do a
- git fetch to update the local ref to the remote ref. */
- struct stat st;
- doFetch = stat(localRefFile.c_str(), &st) != 0 ||
- (uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now;
- }
- if (doFetch)
- {
- Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", uri));
-
- // FIXME: git stderr messes up our progress indicator, so
- // we're using --quiet for now. Should process its stderr.
- runProgram("git", true, { "-C", cacheDir, "fetch", "--quiet", "--force", "--", uri, fmt("%s:%s", *ref, *ref) });
-
- struct timeval times[2];
- times[0].tv_sec = now;
- times[0].tv_usec = 0;
- times[1].tv_sec = now;
- times[1].tv_usec = 0;
-
- utimes(localRefFile.c_str(), times);
- }
-
- // FIXME: check whether rev is an ancestor of ref.
- GitInfo gitInfo;
- gitInfo.rev = rev != "" ? rev : chomp(readFile(localRefFile));
- gitInfo.shortRev = std::string(gitInfo.rev, 0, 7);
-
- printTalkative("using revision %s of repo '%s'", gitInfo.rev, uri);
-
- std::string storeLinkName = hashString(htSHA512, name + std::string("\0"s) + gitInfo.rev).to_string(Base32, false);
- Path storeLink = cacheDir + "/" + storeLinkName + ".link";
- PathLocks storeLinkLock({storeLink}, fmt("waiting for lock on '%1%'...", storeLink)); // FIXME: broken
-
- try {
- auto json = nlohmann::json::parse(readFile(storeLink));
-
- assert(json["name"] == name && json["rev"] == gitInfo.rev);
-
- gitInfo.storePath = json["storePath"];
-
- if (store->isValidPath(store->parseStorePath(gitInfo.storePath))) {
- gitInfo.revCount = json["revCount"];
- return gitInfo;
- }
-
- } catch (SysError & e) {
- if (e.errNo != ENOENT) throw;
- }
-
- auto source = sinkToSource([&](Sink & sink) {
- RunOptions gitOptions("git", { "-C", cacheDir, "archive", gitInfo.rev });
- gitOptions.standardOut = &sink;
- runProgram2(gitOptions);
- });
-
- Path tmpDir = createTempDir();
- AutoDelete delTmpDir(tmpDir, true);
-
- unpackTarfile(*source, tmpDir);
-
- gitInfo.storePath = store->printStorePath(store->addToStore(name, tmpDir));
-
- gitInfo.revCount = std::stoull(runProgram("git", true, { "-C", cacheDir, "rev-list", "--count", gitInfo.rev }));
-
- nlohmann::json json;
- json["storePath"] = gitInfo.storePath;
- json["uri"] = uri;
- json["name"] = name;
- json["rev"] = gitInfo.rev;
- json["revCount"] = gitInfo.revCount;
-
- writeFile(storeLink, json.dump());
-
- return gitInfo;
-}
-
static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
std::string url;
std::optional<std::string> ref;
- std::string rev;
+ std::optional<Hash> rev;
std::string name = "source";
+ bool fetchSubmodules = false;
PathSet context;
state.forceValue(*args[0]);
@@ -213,9 +29,11 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
else if (n == "ref")
ref = state.forceStringNoCtx(*attr.value, *attr.pos);
else if (n == "rev")
- rev = state.forceStringNoCtx(*attr.value, *attr.pos);
+ rev = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA1);
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, *attr.pos);
+ else if (n == "submodules")
+ fetchSubmodules = state.forceBool(*attr.value, *attr.pos);
else
throw EvalError(
ErrorInfo {
@@ -238,17 +56,36 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
// whitelist. Ah well.
state.checkURI(url);
- auto gitInfo = exportGit(state.store, url, ref, rev, name);
+ if (evalSettings.pureEval && !rev)
+ throw Error("in pure evaluation mode, 'fetchGit' requires a Git revision");
+
+ fetchers::Attrs attrs;
+ attrs.insert_or_assign("type", "git");
+ attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url);
+ if (ref) attrs.insert_or_assign("ref", *ref);
+ if (rev) attrs.insert_or_assign("rev", rev->gitRev());
+ if (fetchSubmodules) attrs.insert_or_assign("submodules", true);
+ auto input = fetchers::inputFromAttrs(attrs);
+
+ // FIXME: use name?
+ auto [tree, input2] = input->fetchTree(state.store);
state.mkAttrs(v, 8);
- mkString(*state.allocAttr(v, state.sOutPath), gitInfo.storePath, PathSet({gitInfo.storePath}));
- mkString(*state.allocAttr(v, state.symbols.create("rev")), gitInfo.rev);
- mkString(*state.allocAttr(v, state.symbols.create("shortRev")), gitInfo.shortRev);
- mkInt(*state.allocAttr(v, state.symbols.create("revCount")), gitInfo.revCount);
+ auto storePath = state.store->printStorePath(tree.storePath);
+ mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
+ // Backward compatibility: set 'rev' to
+ // 0000000000000000000000000000000000000000 for a dirty tree.
+ auto rev2 = input2->getRev().value_or(Hash(htSHA1));
+ mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
+ mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev2.gitShortRev());
+ // Backward compatibility: set 'revCount' to 0 for a dirty tree.
+ mkInt(*state.allocAttr(v, state.symbols.create("revCount")),
+ tree.info.revCount.value_or(0));
+ mkBool(*state.allocAttr(v, state.symbols.create("submodules")), fetchSubmodules);
v.attrs->sort();
if (state.allowedPaths)
- state.allowedPaths->insert(state.store->toRealPath(gitInfo.storePath));
+ state.allowedPaths->insert(tree.actualPath);
}
static RegisterPrimOp r("fetchGit", 1, prim_fetchGit);
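
For illustration, a condensed sketch of the call path prim_fetchGit now takes through libfetchers; the function name and URL below are placeholders, not part of this change:

    #include "fetchers.hh"
    #include "store-api.hh"

    void fetchGitExample(nix::ref<nix::Store> store)
    {
        nix::fetchers::Attrs attrs;
        attrs.insert_or_assign("type", "git");
        attrs.insert_or_assign("url", "https://example.org/repo.git");
        attrs.insert_or_assign("ref", "master");
        // In pure evaluation mode a "rev" attribute (40-char SHA-1) is required.

        auto input = nix::fetchers::inputFromAttrs(attrs);
        auto [tree, resolved] = input->fetchTree(store);

        // tree.storePath is the fetched source; 'resolved' carries the
        // resolved revision (resolved->getRev()) used above for the
        // 'rev'/'shortRev' attributes.
    }
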
diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc
index ce99928e2..1e8b9d4a0 100644
--- a/src/libexpr/primops/fetchMercurial.cc
+++ b/src/libexpr/primops/fetchMercurial.cc
@@ -1,174 +1,18 @@
#include "primops.hh"
#include "eval-inline.hh"
-#include "download.hh"
#include "store-api.hh"
-#include "pathlocks.hh"
-
-#include <sys/time.h>
+#include "fetchers.hh"
+#include "url.hh"
#include <regex>
-#include <nlohmann/json.hpp>
-
-using namespace std::string_literals;
-
namespace nix {
-struct HgInfo
-{
- Path storePath;
- std::string branch;
- std::string rev;
- uint64_t revCount = 0;
-};
-
-std::regex commitHashRegex("^[0-9a-fA-F]{40}$");
-
-HgInfo exportMercurial(ref<Store> store, const std::string & uri,
- std::string rev, const std::string & name)
-{
- if (evalSettings.pureEval && rev == "")
- throw Error("in pure evaluation mode, 'fetchMercurial' requires a Mercurial revision");
-
- if (rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.hg")) {
-
- bool clean = runProgram("hg", true, { "status", "-R", uri, "--modified", "--added", "--removed" }) == "";
-
- if (!clean) {
-
- /* This is an unclean working tree. So copy all tracked
- files. */
-
- printTalkative("copying unclean Mercurial working tree '%s'", uri);
-
- HgInfo hgInfo;
- hgInfo.rev = "0000000000000000000000000000000000000000";
- hgInfo.branch = chomp(runProgram("hg", true, { "branch", "-R", uri }));
-
- auto files = tokenizeString<std::set<std::string>>(
- runProgram("hg", true, { "status", "-R", uri, "--clean", "--modified", "--added", "--no-status", "--print0" }), "\0"s);
-
- PathFilter filter = [&](const Path & p) -> bool {
- assert(hasPrefix(p, uri));
- std::string file(p, uri.size() + 1);
-
- auto st = lstat(p);
-
- if (S_ISDIR(st.st_mode)) {
- auto prefix = file + "/";
- auto i = files.lower_bound(prefix);
- return i != files.end() && hasPrefix(*i, prefix);
- }
-
- return files.count(file);
- };
-
- hgInfo.storePath = store->printStorePath(store->addToStore("source", uri, true, htSHA256, filter));
-
- return hgInfo;
- }
- }
-
- if (rev == "") rev = "default";
-
- Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, uri).to_string(Base32, false));
-
- Path stampFile = fmt("%s/.hg/%s.stamp", cacheDir, hashString(htSHA512, rev).to_string(Base32, false));
-
- /* If we haven't pulled this repo less than ‘tarball-ttl’ seconds,
- do so now. */
- time_t now = time(0);
- struct stat st;
- if (stat(stampFile.c_str(), &st) != 0 ||
- (uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now)
- {
- /* Except that if this is a commit hash that we already have,
- we don't have to pull again. */
- if (!(std::regex_match(rev, commitHashRegex)
- && pathExists(cacheDir)
- && runProgram(
- RunOptions("hg", { "log", "-R", cacheDir, "-r", rev, "--template", "1" })
- .killStderr(true)).second == "1"))
- {
- Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", uri));
-
- if (pathExists(cacheDir)) {
- try {
- runProgram("hg", true, { "pull", "-R", cacheDir, "--", uri });
- }
- catch (ExecError & e) {
- string transJournal = cacheDir + "/.hg/store/journal";
- /* hg throws "abandoned transaction" error only if this file exists */
- if (pathExists(transJournal)) {
- runProgram("hg", true, { "recover", "-R", cacheDir });
- runProgram("hg", true, { "pull", "-R", cacheDir, "--", uri });
- } else {
- throw ExecError(e.status, fmt("'hg pull' %s", statusToString(e.status)));
- }
- }
- } else {
- createDirs(dirOf(cacheDir));
- runProgram("hg", true, { "clone", "--noupdate", "--", uri, cacheDir });
- }
- }
-
- writeFile(stampFile, "");
- }
-
- auto tokens = tokenizeString<std::vector<std::string>>(
- runProgram("hg", true, { "log", "-R", cacheDir, "-r", rev, "--template", "{node} {rev} {branch}" }));
- assert(tokens.size() == 3);
-
- HgInfo hgInfo;
- hgInfo.rev = tokens[0];
- hgInfo.revCount = std::stoull(tokens[1]);
- hgInfo.branch = tokens[2];
-
- std::string storeLinkName = hashString(htSHA512, name + std::string("\0"s) + hgInfo.rev).to_string(Base32, false);
- Path storeLink = fmt("%s/.hg/%s.link", cacheDir, storeLinkName);
-
- try {
- auto json = nlohmann::json::parse(readFile(storeLink));
-
- assert(json["name"] == name && json["rev"] == hgInfo.rev);
-
- hgInfo.storePath = json["storePath"];
-
- if (store->isValidPath(store->parseStorePath(hgInfo.storePath))) {
- printTalkative("using cached Mercurial store path '%s'", hgInfo.storePath);
- return hgInfo;
- }
-
- } catch (SysError & e) {
- if (e.errNo != ENOENT) throw;
- }
-
- Path tmpDir = createTempDir();
- AutoDelete delTmpDir(tmpDir, true);
-
- runProgram("hg", true, { "archive", "-R", cacheDir, "-r", rev, tmpDir });
-
- deletePath(tmpDir + "/.hg_archival.txt");
-
- hgInfo.storePath = store->printStorePath(store->addToStore(name, tmpDir));
-
- nlohmann::json json;
- json["storePath"] = hgInfo.storePath;
- json["uri"] = uri;
- json["name"] = name;
- json["branch"] = hgInfo.branch;
- json["rev"] = hgInfo.rev;
- json["revCount"] = hgInfo.revCount;
-
- writeFile(storeLink, json.dump());
-
- return hgInfo;
-}
-
static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
std::string url;
- std::string rev;
+ std::optional<Hash> rev;
+ std::optional<std::string> ref;
std::string name = "source";
PathSet context;
@@ -182,8 +26,15 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
string n(attr.name);
if (n == "url")
url = state.coerceToString(*attr.pos, *attr.value, context, false, false);
- else if (n == "rev")
- rev = state.forceStringNoCtx(*attr.value, *attr.pos);
+ else if (n == "rev") {
+ // Ugly: unlike fetchGit, here the "rev" attribute can
+ // be both a revision or a branch/tag name.
+ auto value = state.forceStringNoCtx(*attr.value, *attr.pos);
+ if (std::regex_match(value, revRegex))
+ rev = Hash(value, htSHA1);
+ else
+ ref = value;
+ }
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else
@@ -209,18 +60,35 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
// whitelist. Ah well.
state.checkURI(url);
- auto hgInfo = exportMercurial(state.store, url, rev, name);
+ if (evalSettings.pureEval && !rev)
+ throw Error("in pure evaluation mode, 'fetchMercurial' requires a Mercurial revision");
+
+ fetchers::Attrs attrs;
+ attrs.insert_or_assign("type", "hg");
+ attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url);
+ if (ref) attrs.insert_or_assign("ref", *ref);
+ if (rev) attrs.insert_or_assign("rev", rev->gitRev());
+ auto input = fetchers::inputFromAttrs(attrs);
+
+ // FIXME: use name
+ auto [tree, input2] = input->fetchTree(state.store);
state.mkAttrs(v, 8);
- mkString(*state.allocAttr(v, state.sOutPath), hgInfo.storePath, PathSet({hgInfo.storePath}));
- mkString(*state.allocAttr(v, state.symbols.create("branch")), hgInfo.branch);
- mkString(*state.allocAttr(v, state.symbols.create("rev")), hgInfo.rev);
- mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(hgInfo.rev, 0, 12));
- mkInt(*state.allocAttr(v, state.symbols.create("revCount")), hgInfo.revCount);
+ auto storePath = state.store->printStorePath(tree.storePath);
+ mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
+ if (input2->getRef())
+ mkString(*state.allocAttr(v, state.symbols.create("branch")), *input2->getRef());
+ // Backward compatibility: set 'rev' to
+ // 0000000000000000000000000000000000000000 for a dirty tree.
+ auto rev2 = input2->getRev().value_or(Hash(htSHA1));
+ mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
+ mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(rev2.gitRev(), 0, 12));
+ if (tree.info.revCount)
+ mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *tree.info.revCount);
v.attrs->sort();
if (state.allowedPaths)
- state.allowedPaths->insert(state.store->toRealPath(hgInfo.storePath));
+ state.allowedPaths->insert(tree.actualPath);
}
static RegisterPrimOp r("fetchMercurial", 1, prim_fetchMercurial);
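
The 'rev' handling above relies on revRegex, declared elsewhere in this change (the new libutil url.hh); the removed code used the equivalent commitHashRegex. A minimal standalone equivalent, for illustration only:

    #include <regex>
    #include <string>

    // A 40-character hexadecimal string is treated as a commit hash
    // ('rev'); anything else is treated as a branch or tag name ('ref').
    static bool looksLikeCommitHash(const std::string & s)
    {
        static const std::regex hex40("^[0-9a-fA-F]{40}$");
        return std::regex_match(s, hex40);
    }
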
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
new file mode 100644
index 000000000..c5a0d9886
--- /dev/null
+++ b/src/libexpr/primops/fetchTree.cc
@@ -0,0 +1,165 @@
+#include "primops.hh"
+#include "eval-inline.hh"
+#include "store-api.hh"
+#include "fetchers.hh"
+#include "filetransfer.hh"
+
+#include <ctime>
+#include <iomanip>
+
+namespace nix {
+
+void emitTreeAttrs(
+ EvalState & state,
+ const fetchers::Tree & tree,
+ std::shared_ptr<const fetchers::Input> input,
+ Value & v)
+{
+ state.mkAttrs(v, 8);
+
+ auto storePath = state.store->printStorePath(tree.storePath);
+
+ mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
+
+ assert(tree.info.narHash);
+ mkString(*state.allocAttr(v, state.symbols.create("narHash")),
+ tree.info.narHash.to_string(SRI));
+
+ if (input->getRev()) {
+ mkString(*state.allocAttr(v, state.symbols.create("rev")), input->getRev()->gitRev());
+ mkString(*state.allocAttr(v, state.symbols.create("shortRev")), input->getRev()->gitShortRev());
+ }
+
+ if (tree.info.revCount)
+ mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *tree.info.revCount);
+
+ if (tree.info.lastModified)
+ mkString(*state.allocAttr(v, state.symbols.create("lastModified")),
+ fmt("%s", std::put_time(std::gmtime(&*tree.info.lastModified), "%Y%m%d%H%M%S")));
+
+ v.attrs->sort();
+}
+
+static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, Value & v)
+{
+ settings.requireExperimentalFeature("flakes");
+
+ std::shared_ptr<const fetchers::Input> input;
+ PathSet context;
+
+ state.forceValue(*args[0]);
+
+ if (args[0]->type == tAttrs) {
+ state.forceAttrs(*args[0], pos);
+
+ fetchers::Attrs attrs;
+
+ for (auto & attr : *args[0]->attrs) {
+ state.forceValue(*attr.value);
+ if (attr.value->type == tString)
+ attrs.emplace(attr.name, attr.value->string.s);
+ else if (attr.value->type == tBool)
+ attrs.emplace(attr.name, attr.value->boolean);
+ else
+ throw TypeError("fetchTree argument '%s' is %s while a string or Boolean is expected",
+ attr.name, showType(*attr.value));
+ }
+
+ if (!attrs.count("type"))
+ throw Error("attribute 'type' is missing in call to 'fetchTree', at %s", pos);
+
+ input = fetchers::inputFromAttrs(attrs);
+ } else
+ input = fetchers::inputFromURL(state.coerceToString(pos, *args[0], context, false, false));
+
+ if (evalSettings.pureEval && !input->isImmutable())
+ throw Error("in pure evaluation mode, 'fetchTree' requires an immutable input");
+
+ // FIXME: use fetchOrSubstituteTree
+ auto [tree, input2] = input->fetchTree(state.store);
+
+ if (state.allowedPaths)
+ state.allowedPaths->insert(tree.actualPath);
+
+ emitTreeAttrs(state, tree, input2, v);
+}
+
+static RegisterPrimOp r("fetchTree", 1, prim_fetchTree);
+
+static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
+ const string & who, bool unpack, std::string name)
+{
+ std::optional<std::string> url;
+ std::optional<Hash> expectedHash;
+
+ state.forceValue(*args[0]);
+
+ if (args[0]->type == tAttrs) {
+
+ state.forceAttrs(*args[0], pos);
+
+ for (auto & attr : *args[0]->attrs) {
+ string n(attr.name);
+ if (n == "url")
+ url = state.forceStringNoCtx(*attr.value, *attr.pos);
+ else if (n == "sha256")
+ expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
+ else if (n == "name")
+ name = state.forceStringNoCtx(*attr.value, *attr.pos);
+ else
+ throw EvalError("unsupported argument '%s' to '%s', at %s",
+ attr.name, who, *attr.pos);
+ }
+
+ if (!url)
+ throw EvalError("'url' argument required, at %s", pos);
+
+ } else
+ url = state.forceStringNoCtx(*args[0], pos);
+
+ url = resolveUri(*url);
+
+ state.checkURI(*url);
+
+ if (name == "")
+ name = baseNameOf(*url);
+
+ if (evalSettings.pureEval && !expectedHash)
+ throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who);
+
+ auto storePath =
+ unpack
+ ? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).storePath
+ : fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
+
+ auto path = state.store->toRealPath(storePath);
+
+ if (expectedHash) {
+ auto hash = unpack
+ ? state.store->queryPathInfo(storePath)->narHash
+ : hashFile(htSHA256, path);
+ if (hash != *expectedHash)
+ throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
+ *url, expectedHash->to_string(), hash.to_string());
+ }
+
+ if (state.allowedPaths)
+ state.allowedPaths->insert(path);
+
+ mkString(v, path, PathSet({path}));
+}
+
+static void prim_fetchurl(EvalState & state, const Pos & pos, Value * * args, Value & v)
+{
+ fetch(state, pos, args, v, "fetchurl", false, "");
+}
+
+static void prim_fetchTarball(EvalState & state, const Pos & pos, Value * * args, Value & v)
+{
+ fetch(state, pos, args, v, "fetchTarball", true, "source");
+}
+
+static RegisterPrimOp r2("__fetchurl", 1, prim_fetchurl);
+static RegisterPrimOp r3("fetchTarball", 1, prim_fetchTarball);
+
+}
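
For illustration, how a caller would use the two helpers that back fetchurl/fetchTarball above; they are declared in the new fetchers.hh further down, and the URLs and function name here are placeholders:

    #include "fetchers.hh"
    #include "store-api.hh"

    void downloadExamples(nix::ref<nix::Store> store)
    {
        // Single file; 'false' = not immutable, so the cached copy may be
        // re-checked once the tarball TTL expires.
        auto file = nix::fetchers::downloadFile(
            store, "https://example.org/hello.txt", "hello.txt", false);
        // file.storePath, file.etag and file.effectiveUrl are now available.

        // Tarball, unpacked into the store; 'true' = immutable cache entry.
        auto tree = nix::fetchers::downloadTarball(
            store, "https://example.org/src.tar.gz", "source", true);
        // tree.storePath is the unpacked tree; tree.info.lastModified may be set.
    }
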
diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh
index 689373873..71025824e 100644
--- a/src/libexpr/value.hh
+++ b/src/libexpr/value.hh
@@ -253,12 +253,17 @@ void mkPath(Value & v, const char * s);
#if HAVE_BOEHMGC
-typedef std::vector<Value *, gc_allocator<Value *> > ValueVector;
-typedef std::map<Symbol, Value *, std::less<Symbol>, gc_allocator<std::pair<const Symbol, Value *> > > ValueMap;
+typedef std::vector<Value *, traceable_allocator<Value *> > ValueVector;
+typedef std::map<Symbol, Value *, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, Value *> > > ValueMap;
#else
typedef std::vector<Value *> ValueVector;
typedef std::map<Symbol, Value *> ValueMap;
#endif
+/* A value allocated in traceable memory. */
+typedef std::shared_ptr<Value *> RootValue;
+
+RootValue allocRootValue(Value * v);
+
}
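
allocRootValue is only declared here; a plausible definition consistent with the switch to traceable_allocator could look as follows (an assumption, not code from this change):

    #if HAVE_BOEHMGC
    RootValue allocRootValue(Value * v)
    {
        /* Allocate the Value* cell from GC-traceable memory so the Boehm
           collector treats it as a root keeping *v alive. */
        return std::allocate_shared<Value *>(traceable_allocator<Value *>(), v);
    }
    #else
    RootValue allocRootValue(Value * v)
    {
        return std::make_shared<Value *>(v);
    }
    #endif
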
diff --git a/src/libfetchers/attrs.cc b/src/libfetchers/attrs.cc
new file mode 100644
index 000000000..feb0a6085
--- /dev/null
+++ b/src/libfetchers/attrs.cc
@@ -0,0 +1,107 @@
+#include "attrs.hh"
+#include "fetchers.hh"
+
+#include <nlohmann/json.hpp>
+
+namespace nix::fetchers {
+
+Attrs jsonToAttrs(const nlohmann::json & json)
+{
+ Attrs attrs;
+
+ for (auto & i : json.items()) {
+ if (i.value().is_number())
+ attrs.emplace(i.key(), i.value().get<int64_t>());
+ else if (i.value().is_string())
+ attrs.emplace(i.key(), i.value().get<std::string>());
+ else if (i.value().is_boolean())
+ attrs.emplace(i.key(), i.value().get<bool>());
+ else
+ throw Error("unsupported input attribute type in lock file");
+ }
+
+ return attrs;
+}
+
+nlohmann::json attrsToJson(const Attrs & attrs)
+{
+ nlohmann::json json;
+ for (auto & attr : attrs) {
+ if (auto v = std::get_if<int64_t>(&attr.second)) {
+ json[attr.first] = *v;
+ } else if (auto v = std::get_if<std::string>(&attr.second)) {
+ json[attr.first] = *v;
+ } else if (auto v = std::get_if<Explicit<bool>>(&attr.second)) {
+ json[attr.first] = v->t;
+ } else abort();
+ }
+ return json;
+}
+
+std::optional<std::string> maybeGetStrAttr(const Attrs & attrs, const std::string & name)
+{
+ auto i = attrs.find(name);
+ if (i == attrs.end()) return {};
+ if (auto v = std::get_if<std::string>(&i->second))
+ return *v;
+ throw Error("input attribute '%s' is not a string %s", name, attrsToJson(attrs).dump());
+}
+
+std::string getStrAttr(const Attrs & attrs, const std::string & name)
+{
+ auto s = maybeGetStrAttr(attrs, name);
+ if (!s)
+ throw Error("input attribute '%s' is missing", name);
+ return *s;
+}
+
+std::optional<int64_t> maybeGetIntAttr(const Attrs & attrs, const std::string & name)
+{
+ auto i = attrs.find(name);
+ if (i == attrs.end()) return {};
+ if (auto v = std::get_if<int64_t>(&i->second))
+ return *v;
+ throw Error("input attribute '%s' is not an integer", name);
+}
+
+int64_t getIntAttr(const Attrs & attrs, const std::string & name)
+{
+ auto s = maybeGetIntAttr(attrs, name);
+ if (!s)
+ throw Error("input attribute '%s' is missing", name);
+ return *s;
+}
+
+std::optional<bool> maybeGetBoolAttr(const Attrs & attrs, const std::string & name)
+{
+ auto i = attrs.find(name);
+ if (i == attrs.end()) return {};
+ if (auto v = std::get_if<int64_t>(&i->second))
+ return *v;
+ throw Error("input attribute '%s' is not a Boolean", name);
+}
+
+bool getBoolAttr(const Attrs & attrs, const std::string & name)
+{
+ auto s = maybeGetBoolAttr(attrs, name);
+ if (!s)
+ throw Error("input attribute '%s' is missing", name);
+ return *s;
+}
+
+std::map<std::string, std::string> attrsToQuery(const Attrs & attrs)
+{
+ std::map<std::string, std::string> query;
+ for (auto & attr : attrs) {
+ if (auto v = std::get_if<int64_t>(&attr.second)) {
+ query.insert_or_assign(attr.first, fmt("%d", *v));
+ } else if (auto v = std::get_if<std::string>(&attr.second)) {
+ query.insert_or_assign(attr.first, *v);
+ } else if (auto v = std::get_if<Explicit<bool>>(&attr.second)) {
+ query.insert_or_assign(attr.first, v->t ? "1" : "0");
+ } else abort();
+ }
+ return query;
+}
+
+}
diff --git a/src/libfetchers/attrs.hh b/src/libfetchers/attrs.hh
new file mode 100644
index 000000000..d6e0ae000
--- /dev/null
+++ b/src/libfetchers/attrs.hh
@@ -0,0 +1,39 @@
+#pragma once
+
+#include "types.hh"
+
+#include <variant>
+
+#include <nlohmann/json_fwd.hpp>
+
+namespace nix::fetchers {
+
+/* Wrap bools to prevent string literals (i.e. 'char *') from being
+ cast to a bool in Attr. */
+template<typename T>
+struct Explicit {
+ T t;
+};
+
+typedef std::variant<std::string, int64_t, Explicit<bool>> Attr;
+typedef std::map<std::string, Attr> Attrs;
+
+Attrs jsonToAttrs(const nlohmann::json & json);
+
+nlohmann::json attrsToJson(const Attrs & attrs);
+
+std::optional<std::string> maybeGetStrAttr(const Attrs & attrs, const std::string & name);
+
+std::string getStrAttr(const Attrs & attrs, const std::string & name);
+
+std::optional<int64_t> maybeGetIntAttr(const Attrs & attrs, const std::string & name);
+
+int64_t getIntAttr(const Attrs & attrs, const std::string & name);
+
+std::optional<bool> maybeGetBoolAttr(const Attrs & attrs, const std::string & name);
+
+bool getBoolAttr(const Attrs & attrs, const std::string & name);
+
+std::map<std::string, std::string> attrsToQuery(const Attrs & attrs);
+
+}
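
For illustration, a round trip through the helpers declared above; the attribute names are arbitrary and the snippet is not part of this change:

    #include "attrs.hh"

    #include <nlohmann/json.hpp>

    void attrsExample()
    {
        using namespace nix::fetchers;

        Attrs attrs;
        attrs.emplace("type", "git");                    // std::string
        attrs.emplace("revCount", (int64_t) 42);         // int64_t
        attrs.emplace("shallow", Explicit<bool>{true});  // wrapped bool

        auto type  = getStrAttr(attrs, "type");          // "git"
        auto count = maybeGetIntAttr(attrs, "revCount"); // std::optional, 42
        auto json  = attrsToJson(attrs).dump();          // e.g. a cache key

        (void) type; (void) count; (void) json;
    }
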
diff --git a/src/libfetchers/cache.cc b/src/libfetchers/cache.cc
new file mode 100644
index 000000000..e1c7f3dee
--- /dev/null
+++ b/src/libfetchers/cache.cc
@@ -0,0 +1,121 @@
+#include "cache.hh"
+#include "sqlite.hh"
+#include "sync.hh"
+#include "store-api.hh"
+
+#include <nlohmann/json.hpp>
+
+namespace nix::fetchers {
+
+static const char * schema = R"sql(
+
+create table if not exists Cache (
+ input text not null,
+ info text not null,
+ path text not null,
+ immutable integer not null,
+ timestamp integer not null,
+ primary key (input)
+);
+)sql";
+
+struct CacheImpl : Cache
+{
+ struct State
+ {
+ SQLite db;
+ SQLiteStmt add, lookup;
+ };
+
+ Sync<State> _state;
+
+ CacheImpl()
+ {
+ auto state(_state.lock());
+
+ auto dbPath = getCacheDir() + "/nix/fetcher-cache-v1.sqlite";
+ createDirs(dirOf(dbPath));
+
+ state->db = SQLite(dbPath);
+ state->db.isCache();
+ state->db.exec(schema);
+
+ state->add.create(state->db,
+ "insert or replace into Cache(input, info, path, immutable, timestamp) values (?, ?, ?, ?, ?)");
+
+ state->lookup.create(state->db,
+ "select info, path, immutable, timestamp from Cache where input = ?");
+ }
+
+ void add(
+ ref<Store> store,
+ const Attrs & inAttrs,
+ const Attrs & infoAttrs,
+ const StorePath & storePath,
+ bool immutable) override
+ {
+ _state.lock()->add.use()
+ (attrsToJson(inAttrs).dump())
+ (attrsToJson(infoAttrs).dump())
+ (store->printStorePath(storePath))
+ (immutable)
+ (time(0)).exec();
+ }
+
+ std::optional<std::pair<Attrs, StorePath>> lookup(
+ ref<Store> store,
+ const Attrs & inAttrs) override
+ {
+ if (auto res = lookupExpired(store, inAttrs)) {
+ if (!res->expired)
+ return std::make_pair(std::move(res->infoAttrs), std::move(res->storePath));
+ debug("ignoring expired cache entry '%s'",
+ attrsToJson(inAttrs).dump());
+ }
+ return {};
+ }
+
+ std::optional<Result> lookupExpired(
+ ref<Store> store,
+ const Attrs & inAttrs) override
+ {
+ auto state(_state.lock());
+
+ auto inAttrsJson = attrsToJson(inAttrs).dump();
+
+ auto stmt(state->lookup.use()(inAttrsJson));
+ if (!stmt.next()) {
+ debug("did not find cache entry for '%s'", inAttrsJson);
+ return {};
+ }
+
+ auto infoJson = stmt.getStr(0);
+ auto storePath = store->parseStorePath(stmt.getStr(1));
+ auto immutable = stmt.getInt(2) != 0;
+ auto timestamp = stmt.getInt(3);
+
+ store->addTempRoot(storePath);
+ if (!store->isValidPath(storePath)) {
+ // FIXME: we could try to substitute 'storePath'.
+ debug("ignoring disappeared cache entry '%s'", inAttrsJson);
+ return {};
+ }
+
+ debug("using cache entry '%s' -> '%s', '%s'",
+ inAttrsJson, infoJson, store->printStorePath(storePath));
+
+ return Result {
+ .expired = !immutable && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)),
+ .infoAttrs = jsonToAttrs(nlohmann::json::parse(infoJson)),
+ .storePath = std::move(storePath)
+ };
+ }
+};
+
+ref<Cache> getCache()
+{
+ static auto cache = std::make_shared<CacheImpl>();
+ return ref<Cache>(cache);
+}
+
+}
diff --git a/src/libfetchers/cache.hh b/src/libfetchers/cache.hh
new file mode 100644
index 000000000..d76ab1233
--- /dev/null
+++ b/src/libfetchers/cache.hh
@@ -0,0 +1,34 @@
+#pragma once
+
+#include "fetchers.hh"
+
+namespace nix::fetchers {
+
+struct Cache
+{
+ virtual void add(
+ ref<Store> store,
+ const Attrs & inAttrs,
+ const Attrs & infoAttrs,
+ const StorePath & storePath,
+ bool immutable) = 0;
+
+ virtual std::optional<std::pair<Attrs, StorePath>> lookup(
+ ref<Store> store,
+ const Attrs & inAttrs) = 0;
+
+ struct Result
+ {
+ bool expired = false;
+ Attrs infoAttrs;
+ StorePath storePath;
+ };
+
+ virtual std::optional<Result> lookupExpired(
+ ref<Store> store,
+ const Attrs & inAttrs) = 0;
+};
+
+ref<Cache> getCache();
+
+}
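
A sketch of the lookup-then-add pattern the fetchers below use with getCache(); the key attributes and function name are placeholders, not part of this change:

    #include "cache.hh"
    #include "store-api.hh"

    #include <optional>

    std::optional<nix::StorePath> lookupExample(nix::ref<nix::Store> store)
    {
        using namespace nix::fetchers;

        Attrs key({{"type", "example"}, {"url", "https://example.org"}});

        // Hit: the entry exists, is still valid in the store and has not expired.
        if (auto res = getCache()->lookup(store, key))
            return std::move(res->second);

        // Miss: perform the real fetch, then record it, e.g.
        //   getCache()->add(store, key, infoAttrs, storePath, /* immutable */ false);
        return std::nullopt;
    }
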
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
new file mode 100644
index 000000000..94ac30e38
--- /dev/null
+++ b/src/libfetchers/fetchers.cc
@@ -0,0 +1,75 @@
+#include "fetchers.hh"
+#include "store-api.hh"
+
+#include <nlohmann/json.hpp>
+
+namespace nix::fetchers {
+
+std::unique_ptr<std::vector<std::unique_ptr<InputScheme>>> inputSchemes = nullptr;
+
+void registerInputScheme(std::unique_ptr<InputScheme> && inputScheme)
+{
+ if (!inputSchemes) inputSchemes = std::make_unique<std::vector<std::unique_ptr<InputScheme>>>();
+ inputSchemes->push_back(std::move(inputScheme));
+}
+
+std::unique_ptr<Input> inputFromURL(const ParsedURL & url)
+{
+ for (auto & inputScheme : *inputSchemes) {
+ auto res = inputScheme->inputFromURL(url);
+ if (res) return res;
+ }
+ throw Error("input '%s' is unsupported", url.url);
+}
+
+std::unique_ptr<Input> inputFromURL(const std::string & url)
+{
+ return inputFromURL(parseURL(url));
+}
+
+std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs)
+{
+ auto attrs2(attrs);
+ attrs2.erase("narHash");
+ for (auto & inputScheme : *inputSchemes) {
+ auto res = inputScheme->inputFromAttrs(attrs2);
+ if (res) {
+ if (auto narHash = maybeGetStrAttr(attrs, "narHash"))
+ // FIXME: require SRI hash.
+ res->narHash = Hash(*narHash);
+ return res;
+ }
+ }
+ throw Error("input '%s' is unsupported", attrsToJson(attrs));
+}
+
+Attrs Input::toAttrs() const
+{
+ auto attrs = toAttrsInternal();
+ if (narHash)
+ attrs.emplace("narHash", narHash->to_string(SRI));
+ attrs.emplace("type", type());
+ return attrs;
+}
+
+std::pair<Tree, std::shared_ptr<const Input>> Input::fetchTree(ref<Store> store) const
+{
+ auto [tree, input] = fetchTreeInternal(store);
+
+ if (tree.actualPath == "")
+ tree.actualPath = store->toRealPath(tree.storePath);
+
+ if (!tree.info.narHash)
+ tree.info.narHash = store->queryPathInfo(tree.storePath)->narHash;
+
+ if (input->narHash)
+ assert(input->narHash == tree.info.narHash);
+
+ if (narHash && narHash != input->narHash)
+ throw Error("NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
+ to_string(), tree.actualPath, narHash->to_string(SRI), input->narHash->to_string(SRI));
+
+ return {std::move(tree), input};
+}
+
+}
diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh
new file mode 100644
index 000000000..59a58ae67
--- /dev/null
+++ b/src/libfetchers/fetchers.hh
@@ -0,0 +1,103 @@
+#pragma once
+
+#include "types.hh"
+#include "hash.hh"
+#include "path.hh"
+#include "tree-info.hh"
+#include "attrs.hh"
+#include "url.hh"
+
+#include <memory>
+
+namespace nix { class Store; }
+
+namespace nix::fetchers {
+
+struct Input;
+
+struct Tree
+{
+ Path actualPath;
+ StorePath storePath;
+ TreeInfo info;
+};
+
+struct Input : std::enable_shared_from_this<Input>
+{
+ std::optional<Hash> narHash; // FIXME: implement
+
+ virtual std::string type() const = 0;
+
+ virtual ~Input() { }
+
+ virtual bool operator ==(const Input & other) const { return false; }
+
+ /* Check whether this is a "direct" input, that is, not
+ one that goes through a registry. */
+ virtual bool isDirect() const { return true; }
+
+ /* Check whether this is an "immutable" input, that is,
+ one that contains a commit hash or content hash. */
+ virtual bool isImmutable() const { return (bool) narHash; }
+
+ virtual bool contains(const Input & other) const { return false; }
+
+ virtual std::optional<std::string> getRef() const { return {}; }
+
+ virtual std::optional<Hash> getRev() const { return {}; }
+
+ virtual ParsedURL toURL() const = 0;
+
+ std::string to_string() const
+ {
+ return toURL().to_string();
+ }
+
+ Attrs toAttrs() const;
+
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTree(ref<Store> store) const;
+
+private:
+
+ virtual std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(ref<Store> store) const = 0;
+
+ virtual Attrs toAttrsInternal() const = 0;
+};
+
+struct InputScheme
+{
+ virtual ~InputScheme() { }
+
+ virtual std::unique_ptr<Input> inputFromURL(const ParsedURL & url) = 0;
+
+ virtual std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) = 0;
+};
+
+std::unique_ptr<Input> inputFromURL(const ParsedURL & url);
+
+std::unique_ptr<Input> inputFromURL(const std::string & url);
+
+std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs);
+
+void registerInputScheme(std::unique_ptr<InputScheme> && fetcher);
+
+struct DownloadFileResult
+{
+ StorePath storePath;
+ std::string etag;
+ std::string effectiveUrl;
+};
+
+DownloadFileResult downloadFile(
+ ref<Store> store,
+ const std::string & url,
+ const std::string & name,
+ bool immutable);
+
+Tree downloadTarball(
+ ref<Store> store,
+ const std::string & url,
+ const std::string & name,
+ bool immutable);
+
+}
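
For illustration, a caller's view of this interface; the URL and function name are placeholders, not part of this change:

    #include "fetchers.hh"
    #include "store-api.hh"

    void fetchFromUrlExample(nix::ref<nix::Store> store)
    {
        auto input = nix::fetchers::inputFromURL(
            "git+https://example.org/repo.git?ref=master");

        // No rev or narHash yet, so pure evaluation mode would reject this.
        bool locked = input->isImmutable();

        auto [tree, resolved] = input->fetchTree(store);

        // 'resolved' is the locked form: resolved->toAttrs() now includes
        // the resolved rev and can be serialised (cf. attrsToJson).
        (void) locked;
    }
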
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
new file mode 100644
index 000000000..7c18cf67f
--- /dev/null
+++ b/src/libfetchers/git.cc
@@ -0,0 +1,438 @@
+#include "fetchers.hh"
+#include "cache.hh"
+#include "globals.hh"
+#include "tarfile.hh"
+#include "store-api.hh"
+
+#include <sys/time.h>
+
+using namespace std::string_literals;
+
+namespace nix::fetchers {
+
+static std::string readHead(const Path & path)
+{
+ return chomp(runProgram("git", true, { "-C", path, "rev-parse", "--abbrev-ref", "HEAD" }));
+}
+
+static bool isNotDotGitDirectory(const Path & path)
+{
+ static const std::regex gitDirRegex("^(?:.*/)?\\.git$");
+
+ return not std::regex_match(path, gitDirRegex);
+}
+
+struct GitInput : Input
+{
+ ParsedURL url;
+ std::optional<std::string> ref;
+ std::optional<Hash> rev;
+ bool shallow = false;
+ bool submodules = false;
+
+ GitInput(const ParsedURL & url) : url(url)
+ { }
+
+ std::string type() const override { return "git"; }
+
+ bool operator ==(const Input & other) const override
+ {
+ auto other2 = dynamic_cast<const GitInput *>(&other);
+ return
+ other2
+ && url == other2->url
+ && rev == other2->rev
+ && ref == other2->ref;
+ }
+
+ bool isImmutable() const override
+ {
+ return (bool) rev || narHash;
+ }
+
+ std::optional<std::string> getRef() const override { return ref; }
+
+ std::optional<Hash> getRev() const override { return rev; }
+
+ ParsedURL toURL() const override
+ {
+ ParsedURL url2(url);
+ if (url2.scheme != "git") url2.scheme = "git+" + url2.scheme;
+ if (rev) url2.query.insert_or_assign("rev", rev->gitRev());
+ if (ref) url2.query.insert_or_assign("ref", *ref);
+ if (shallow) url2.query.insert_or_assign("shallow", "1");
+ return url2;
+ }
+
+ Attrs toAttrsInternal() const override
+ {
+ Attrs attrs;
+ attrs.emplace("url", url.to_string());
+ if (ref)
+ attrs.emplace("ref", *ref);
+ if (rev)
+ attrs.emplace("rev", rev->gitRev());
+ if (shallow)
+ attrs.emplace("shallow", true);
+ if (submodules)
+ attrs.emplace("submodules", true);
+ return attrs;
+ }
+
+ std::pair<bool, std::string> getActualUrl() const
+ {
+ // Don't clone file:// URIs (but otherwise treat them the
+ // same as remote URIs, i.e. don't use the working tree or
+ // HEAD).
+ static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing
+ bool isLocal = url.scheme == "file" && !forceHttp;
+ return {isLocal, isLocal ? url.path : url.base};
+ }
+
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+ {
+ auto name = "source";
+
+ auto input = std::make_shared<GitInput>(*this);
+
+ assert(!rev || rev->type == htSHA1);
+
+ std::string cacheType = "git";
+ if (shallow) cacheType += "-shallow";
+ if (submodules) cacheType += "-submodules";
+
+ auto getImmutableAttrs = [&]()
+ {
+ return Attrs({
+ {"type", cacheType},
+ {"name", name},
+ {"rev", input->rev->gitRev()},
+ });
+ };
+
+ auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
+ -> std::pair<Tree, std::shared_ptr<const Input>>
+ {
+ assert(input->rev);
+ assert(!rev || rev == input->rev);
+ return {
+ Tree {
+ .actualPath = store->toRealPath(storePath),
+ .storePath = std::move(storePath),
+ .info = TreeInfo {
+ .revCount = shallow ? std::nullopt : std::optional(getIntAttr(infoAttrs, "revCount")),
+ .lastModified = getIntAttr(infoAttrs, "lastModified"),
+ },
+ },
+ input
+ };
+ };
+
+ if (rev) {
+ if (auto res = getCache()->lookup(store, getImmutableAttrs()))
+ return makeResult(res->first, std::move(res->second));
+ }
+
+ auto [isLocal, actualUrl_] = getActualUrl();
+ auto actualUrl = actualUrl_; // work around clang bug
+
+ // If this is a local directory and no ref or revision is
+ // given, then allow the use of an unclean working tree.
+ if (!input->ref && !input->rev && isLocal) {
+ bool clean = false;
+
+ /* Check whether this repo has any commits. There are
+ probably better ways to do this. */
+ auto gitDir = actualUrl + "/.git";
+ auto commonGitDir = chomp(runProgram(
+ "git",
+ true,
+ { "-C", actualUrl, "rev-parse", "--git-common-dir" }
+ ));
+ if (commonGitDir != ".git")
+ gitDir = commonGitDir;
+
+ bool haveCommits = !readDirectory(gitDir + "/refs/heads").empty();
+
+ try {
+ if (haveCommits) {
+ runProgram("git", true, { "-C", actualUrl, "diff-index", "--quiet", "HEAD", "--" });
+ clean = true;
+ }
+ } catch (ExecError & e) {
+ if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
+ }
+
+ if (!clean) {
+
+ /* This is an unclean working tree. So copy all tracked files. */
+
+ if (!settings.allowDirty)
+ throw Error("Git tree '%s' is dirty", actualUrl);
+
+ if (settings.warnDirty)
+ warn("Git tree '%s' is dirty", actualUrl);
+
+ auto gitOpts = Strings({ "-C", actualUrl, "ls-files", "-z" });
+ if (submodules)
+ gitOpts.emplace_back("--recurse-submodules");
+
+ auto files = tokenizeString<std::set<std::string>>(
+ runProgram("git", true, gitOpts), "\0"s);
+
+ PathFilter filter = [&](const Path & p) -> bool {
+ assert(hasPrefix(p, actualUrl));
+ std::string file(p, actualUrl.size() + 1);
+
+ auto st = lstat(p);
+
+ if (S_ISDIR(st.st_mode)) {
+ auto prefix = file + "/";
+ auto i = files.lower_bound(prefix);
+ return i != files.end() && hasPrefix(*i, prefix);
+ }
+
+ return files.count(file);
+ };
+
+ auto storePath = store->addToStore("source", actualUrl, true, htSHA256, filter);
+
+ auto tree = Tree {
+ .actualPath = store->printStorePath(storePath),
+ .storePath = std::move(storePath),
+ .info = TreeInfo {
+ // FIXME: maybe we should use the timestamp of the last
+ // modified dirty file?
+ .lastModified = haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "HEAD" })) : 0,
+ }
+ };
+
+ return {std::move(tree), input};
+ }
+ }
+
+ if (!input->ref) input->ref = isLocal ? readHead(actualUrl) : "master";
+
+ Attrs mutableAttrs({
+ {"type", cacheType},
+ {"name", name},
+ {"url", actualUrl},
+ {"ref", *input->ref},
+ });
+
+ Path repoDir;
+
+ if (isLocal) {
+
+ if (!input->rev)
+ input->rev = Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input->ref })), htSHA1);
+
+ repoDir = actualUrl;
+
+ } else {
+
+ if (auto res = getCache()->lookup(store, mutableAttrs)) {
+ auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1);
+ if (!rev || rev == rev2) {
+ input->rev = rev2;
+ return makeResult(res->first, std::move(res->second));
+ }
+ }
+
+ Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, actualUrl).to_string(Base32, false);
+ repoDir = cacheDir;
+
+ if (!pathExists(cacheDir)) {
+ createDirs(dirOf(cacheDir));
+ runProgram("git", true, { "init", "--bare", repoDir });
+ }
+
+ Path localRefFile =
+ input->ref->compare(0, 5, "refs/") == 0
+ ? cacheDir + "/" + *input->ref
+ : cacheDir + "/refs/heads/" + *input->ref;
+
+ bool doFetch;
+ time_t now = time(0);
+
+ /* If a rev was specified, we need to fetch if it's not in the
+ repo. */
+ if (input->rev) {
+ try {
+ runProgram("git", true, { "-C", repoDir, "cat-file", "-e", input->rev->gitRev() });
+ doFetch = false;
+ } catch (ExecError & e) {
+ if (WIFEXITED(e.status)) {
+ doFetch = true;
+ } else {
+ throw;
+ }
+ }
+ } else {
+ /* If the local ref is older than ‘tarball-ttl’ seconds, do a
+ git fetch to update the local ref to the remote ref. */
+ struct stat st;
+ doFetch = stat(localRefFile.c_str(), &st) != 0 ||
+ (uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now;
+ }
+
+ if (doFetch) {
+ Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", actualUrl));
+
+ // FIXME: git stderr messes up our progress indicator, so
+ // we're using --quiet for now. Should process its stderr.
+ try {
+ runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", *input->ref, *input->ref) });
+ } catch (Error & e) {
+ if (!pathExists(localRefFile)) throw;
+ warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl);
+ }
+
+ struct timeval times[2];
+ times[0].tv_sec = now;
+ times[0].tv_usec = 0;
+ times[1].tv_sec = now;
+ times[1].tv_usec = 0;
+
+ utimes(localRefFile.c_str(), times);
+ }
+
+ if (!input->rev)
+ input->rev = Hash(chomp(readFile(localRefFile)), htSHA1);
+ }
+
+ bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "rev-parse", "--is-shallow-repository" })) == "true";
+
+ if (isShallow && !shallow)
+ throw Error("'%s' is a shallow Git repository, but a non-shallow repository is needed", actualUrl);
+
+ // FIXME: check whether rev is an ancestor of ref.
+
+ printTalkative("using revision %s of repo '%s'", input->rev->gitRev(), actualUrl);
+
+ /* Now that we know the ref, check again whether we have it in
+ the store. */
+ if (auto res = getCache()->lookup(store, getImmutableAttrs()))
+ return makeResult(res->first, std::move(res->second));
+
+ Path tmpDir = createTempDir();
+ AutoDelete delTmpDir(tmpDir, true);
+ PathFilter filter = defaultPathFilter;
+
+ if (submodules) {
+ Path tmpGitDir = createTempDir();
+ AutoDelete delTmpGitDir(tmpGitDir, true);
+
+ runProgram("git", true, { "init", tmpDir, "--separate-git-dir", tmpGitDir });
+ // TODO: repoDir might lack the ref (it only checks if rev
+ // exists, see FIXME above) so use a big hammer and fetch
+ // everything to ensure we get the rev.
+ runProgram("git", true, { "-C", tmpDir, "fetch", "--quiet", "--force",
+ "--update-head-ok", "--", repoDir, "refs/*:refs/*" });
+
+ runProgram("git", true, { "-C", tmpDir, "checkout", "--quiet", input->rev->gitRev() });
+ runProgram("git", true, { "-C", tmpDir, "remote", "add", "origin", actualUrl });
+ runProgram("git", true, { "-C", tmpDir, "submodule", "--quiet", "update", "--init", "--recursive" });
+
+ filter = isNotDotGitDirectory;
+ } else {
+ // FIXME: should pipe this, or find some better way to extract a
+ // revision.
+ auto source = sinkToSource([&](Sink & sink) {
+ RunOptions gitOptions("git", { "-C", repoDir, "archive", input->rev->gitRev() });
+ gitOptions.standardOut = &sink;
+ runProgram2(gitOptions);
+ });
+
+ unpackTarfile(*source, tmpDir);
+ }
+
+ auto storePath = store->addToStore(name, tmpDir, true, htSHA256, filter);
+
+ auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input->rev->gitRev() }));
+
+ Attrs infoAttrs({
+ {"rev", input->rev->gitRev()},
+ {"lastModified", lastModified},
+ });
+
+ if (!shallow)
+ infoAttrs.insert_or_assign("revCount",
+ std::stoull(runProgram("git", true, { "-C", repoDir, "rev-list", "--count", input->rev->gitRev() })));
+
+ if (!this->rev)
+ getCache()->add(
+ store,
+ mutableAttrs,
+ infoAttrs,
+ storePath,
+ false);
+
+ getCache()->add(
+ store,
+ getImmutableAttrs(),
+ infoAttrs,
+ storePath,
+ true);
+
+ return makeResult(infoAttrs, std::move(storePath));
+ }
+};
+
+struct GitInputScheme : InputScheme
+{
+ std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
+ {
+ if (url.scheme != "git" &&
+ url.scheme != "git+http" &&
+ url.scheme != "git+https" &&
+ url.scheme != "git+ssh" &&
+ url.scheme != "git+file") return nullptr;
+
+ auto url2(url);
+ if (hasPrefix(url2.scheme, "git+")) url2.scheme = std::string(url2.scheme, 4);
+ url2.query.clear();
+
+ Attrs attrs;
+ attrs.emplace("type", "git");
+
+ for (auto &[name, value] : url.query) {
+ if (name == "rev" || name == "ref")
+ attrs.emplace(name, value);
+ else
+ url2.query.emplace(name, value);
+ }
+
+ attrs.emplace("url", url2.to_string());
+
+ return inputFromAttrs(attrs);
+ }
+
+ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
+ {
+ if (maybeGetStrAttr(attrs, "type") != "git") return {};
+
+ for (auto & [name, value] : attrs)
+ if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules")
+ throw Error("unsupported Git input attribute '%s'", name);
+
+ auto input = std::make_unique<GitInput>(parseURL(getStrAttr(attrs, "url")));
+ if (auto ref = maybeGetStrAttr(attrs, "ref")) {
+ if (!std::regex_match(*ref, refRegex))
+ throw BadURL("invalid Git branch/tag name '%s'", *ref);
+ input->ref = *ref;
+ }
+ if (auto rev = maybeGetStrAttr(attrs, "rev"))
+ input->rev = Hash(*rev, htSHA1);
+
+ input->shallow = maybeGetBoolAttr(attrs, "shallow").value_or(false);
+
+ input->submodules = maybeGetBoolAttr(attrs, "submodules").value_or(false);
+
+ return input;
+ }
+};
+
+static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitInputScheme>()); });
+
+}
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
new file mode 100644
index 000000000..8675a5a66
--- /dev/null
+++ b/src/libfetchers/github.cc
@@ -0,0 +1,195 @@
+#include "filetransfer.hh"
+#include "cache.hh"
+#include "fetchers.hh"
+#include "globals.hh"
+#include "store-api.hh"
+
+#include <nlohmann/json.hpp>
+
+namespace nix::fetchers {
+
+std::regex ownerRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
+std::regex repoRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
+
+struct GitHubInput : Input
+{
+ std::string owner;
+ std::string repo;
+ std::optional<std::string> ref;
+ std::optional<Hash> rev;
+
+ std::string type() const override { return "github"; }
+
+ bool operator ==(const Input & other) const override
+ {
+ auto other2 = dynamic_cast<const GitHubInput *>(&other);
+ return
+ other2
+ && owner == other2->owner
+ && repo == other2->repo
+ && rev == other2->rev
+ && ref == other2->ref;
+ }
+
+ bool isImmutable() const override
+ {
+ return (bool) rev || narHash;
+ }
+
+ std::optional<std::string> getRef() const override { return ref; }
+
+ std::optional<Hash> getRev() const override { return rev; }
+
+ ParsedURL toURL() const override
+ {
+ auto path = owner + "/" + repo;
+ assert(!(ref && rev));
+ if (ref) path += "/" + *ref;
+ if (rev) path += "/" + rev->to_string(Base16, false);
+ return ParsedURL {
+ .scheme = "github",
+ .path = path,
+ };
+ }
+
+ Attrs toAttrsInternal() const override
+ {
+ Attrs attrs;
+ attrs.emplace("owner", owner);
+ attrs.emplace("repo", repo);
+ if (ref)
+ attrs.emplace("ref", *ref);
+ if (rev)
+ attrs.emplace("rev", rev->gitRev());
+ return attrs;
+ }
+
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+ {
+ auto rev = this->rev;
+ auto ref = this->ref.value_or("master");
+
+ if (!rev) {
+ auto url = fmt("https://api.github.com/repos/%s/%s/commits/%s",
+ owner, repo, ref);
+ auto json = nlohmann::json::parse(
+ readFile(
+ store->toRealPath(
+ downloadFile(store, url, "source", false).storePath)));
+ rev = Hash(json["sha"], htSHA1);
+ debug("HEAD revision for '%s' is %s", url, rev->gitRev());
+ }
+
+ auto input = std::make_shared<GitHubInput>(*this);
+ input->ref = {};
+ input->rev = *rev;
+
+ Attrs immutableAttrs({
+ {"type", "git-tarball"},
+ {"rev", rev->gitRev()},
+ });
+
+ if (auto res = getCache()->lookup(store, immutableAttrs)) {
+ return {
+ Tree{
+ .actualPath = store->toRealPath(res->second),
+ .storePath = std::move(res->second),
+ .info = TreeInfo {
+ .lastModified = getIntAttr(res->first, "lastModified"),
+ },
+ },
+ input
+ };
+ }
+
+ // FIXME: use regular /archive URLs instead? api.github.com
+ // might have stricter rate limits.
+
+ auto url = fmt("https://api.github.com/repos/%s/%s/tarball/%s",
+ owner, repo, rev->to_string(Base16, false));
+
+ std::string accessToken = settings.githubAccessToken.get();
+ if (accessToken != "")
+ url += "?access_token=" + accessToken;
+
+ auto tree = downloadTarball(store, url, "source", true);
+
+ getCache()->add(
+ store,
+ immutableAttrs,
+ {
+ {"rev", rev->gitRev()},
+ {"lastModified", *tree.info.lastModified}
+ },
+ tree.storePath,
+ true);
+
+ return {std::move(tree), input};
+ }
+};
+
+struct GitHubInputScheme : InputScheme
+{
+ std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
+ {
+ if (url.scheme != "github") return nullptr;
+
+ auto path = tokenizeString<std::vector<std::string>>(url.path, "/");
+ auto input = std::make_unique<GitHubInput>();
+
+ if (path.size() == 2) {
+ } else if (path.size() == 3) {
+ if (std::regex_match(path[2], revRegex))
+ input->rev = Hash(path[2], htSHA1);
+ else if (std::regex_match(path[2], refRegex))
+ input->ref = path[2];
+ else
+ throw BadURL("in GitHub URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[2]);
+ } else
+ throw BadURL("GitHub URL '%s' is invalid", url.url);
+
+ for (auto &[name, value] : url.query) {
+ if (name == "rev") {
+ if (input->rev)
+ throw BadURL("GitHub URL '%s' contains multiple commit hashes", url.url);
+ input->rev = Hash(value, htSHA1);
+ }
+ else if (name == "ref") {
+ if (!std::regex_match(value, refRegex))
+ throw BadURL("GitHub URL '%s' contains an invalid branch/tag name", url.url);
+ if (input->ref)
+ throw BadURL("GitHub URL '%s' contains multiple branch/tag names", url.url);
+ input->ref = value;
+ }
+ }
+
+ if (input->ref && input->rev)
+ throw BadURL("GitHub URL '%s' contains both a commit hash and a branch/tag name", url.url);
+
+ input->owner = path[0];
+ input->repo = path[1];
+
+ return input;
+ }
+
+ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
+ {
+ if (maybeGetStrAttr(attrs, "type") != "github") return {};
+
+ for (auto & [name, value] : attrs)
+ if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev")
+ throw Error("unsupported GitHub input attribute '%s'", name);
+
+ auto input = std::make_unique<GitHubInput>();
+ input->owner = getStrAttr(attrs, "owner");
+ input->repo = getStrAttr(attrs, "repo");
+ input->ref = maybeGetStrAttr(attrs, "ref");
+ if (auto rev = maybeGetStrAttr(attrs, "rev"))
+ input->rev = Hash(*rev, htSHA1);
+ return input;
+ }
+};
+
+static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitHubInputScheme>()); });
+
+}
diff --git a/src/libfetchers/local.mk b/src/libfetchers/local.mk
new file mode 100644
index 000000000..d7143d8a6
--- /dev/null
+++ b/src/libfetchers/local.mk
@@ -0,0 +1,11 @@
+libraries += libfetchers
+
+libfetchers_NAME = libnixfetchers
+
+libfetchers_DIR := $(d)
+
+libfetchers_SOURCES := $(wildcard $(d)/*.cc)
+
+libfetchers_CXXFLAGS += -I src/libutil -I src/libstore
+
+libfetchers_LIBS = libutil libstore libnixrust
diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc
new file mode 100644
index 000000000..1d6571571
--- /dev/null
+++ b/src/libfetchers/mercurial.cc
@@ -0,0 +1,303 @@
+#include "fetchers.hh"
+#include "cache.hh"
+#include "globals.hh"
+#include "tarfile.hh"
+#include "store-api.hh"
+
+#include <sys/time.h>
+
+using namespace std::string_literals;
+
+namespace nix::fetchers {
+
+struct MercurialInput : Input
+{
+ ParsedURL url;
+ std::optional<std::string> ref;
+ std::optional<Hash> rev;
+
+ MercurialInput(const ParsedURL & url) : url(url)
+ { }
+
+ std::string type() const override { return "hg"; }
+
+ bool operator ==(const Input & other) const override
+ {
+ auto other2 = dynamic_cast<const MercurialInput *>(&other);
+ return
+ other2
+ && url == other2->url
+ && rev == other2->rev
+ && ref == other2->ref;
+ }
+
+ bool isImmutable() const override
+ {
+ return (bool) rev || narHash;
+ }
+
+ std::optional<std::string> getRef() const override { return ref; }
+
+ std::optional<Hash> getRev() const override { return rev; }
+
+ ParsedURL toURL() const override
+ {
+ ParsedURL url2(url);
+ url2.scheme = "hg+" + url2.scheme;
+ if (rev) url2.query.insert_or_assign("rev", rev->gitRev());
+ if (ref) url2.query.insert_or_assign("ref", *ref);
+        return url2;
+ }
+
+ Attrs toAttrsInternal() const override
+ {
+ Attrs attrs;
+ attrs.emplace("url", url.to_string());
+ if (ref)
+ attrs.emplace("ref", *ref);
+ if (rev)
+ attrs.emplace("rev", rev->gitRev());
+ return attrs;
+ }
+
+ std::pair<bool, std::string> getActualUrl() const
+ {
+ bool isLocal = url.scheme == "file";
+ return {isLocal, isLocal ? url.path : url.base};
+ }
+
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+ {
+ auto name = "source";
+
+ auto input = std::make_shared<MercurialInput>(*this);
+
+ auto [isLocal, actualUrl_] = getActualUrl();
+ auto actualUrl = actualUrl_; // work around clang bug
+
+ // FIXME: return lastModified.
+
+ // FIXME: don't clone local repositories.
+
+ if (!input->ref && !input->rev && isLocal && pathExists(actualUrl + "/.hg")) {
+
+ bool clean = runProgram("hg", true, { "status", "-R", actualUrl, "--modified", "--added", "--removed" }) == "";
+
+ if (!clean) {
+
+ /* This is an unclean working tree. So copy all tracked
+ files. */
+
+ if (!settings.allowDirty)
+ throw Error("Mercurial tree '%s' is unclean", actualUrl);
+
+ if (settings.warnDirty)
+ warn("Mercurial tree '%s' is unclean", actualUrl);
+
+ input->ref = chomp(runProgram("hg", true, { "branch", "-R", actualUrl }));
+
+ auto files = tokenizeString<std::set<std::string>>(
+ runProgram("hg", true, { "status", "-R", actualUrl, "--clean", "--modified", "--added", "--no-status", "--print0" }), "\0"s);
+
+ PathFilter filter = [&](const Path & p) -> bool {
+ assert(hasPrefix(p, actualUrl));
+ std::string file(p, actualUrl.size() + 1);
+
+ auto st = lstat(p);
+
+ if (S_ISDIR(st.st_mode)) {
+ auto prefix = file + "/";
+ auto i = files.lower_bound(prefix);
+ return i != files.end() && hasPrefix(*i, prefix);
+ }
+
+ return files.count(file);
+ };
+
+ auto storePath = store->addToStore("source", actualUrl, true, htSHA256, filter);
+
+ return {Tree {
+ .actualPath = store->printStorePath(storePath),
+ .storePath = std::move(storePath),
+ }, input};
+ }
+ }
+
+ if (!input->ref) input->ref = "default";
+
+ auto getImmutableAttrs = [&]()
+ {
+ return Attrs({
+ {"type", "hg"},
+ {"name", name},
+ {"rev", input->rev->gitRev()},
+ });
+ };
+
+ auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
+ -> std::pair<Tree, std::shared_ptr<const Input>>
+ {
+ assert(input->rev);
+ assert(!rev || rev == input->rev);
+ return {
+ Tree{
+ .actualPath = store->toRealPath(storePath),
+ .storePath = std::move(storePath),
+ .info = TreeInfo {
+ .revCount = getIntAttr(infoAttrs, "revCount"),
+ },
+ },
+ input
+ };
+ };
+
+ if (input->rev) {
+ if (auto res = getCache()->lookup(store, getImmutableAttrs()))
+ return makeResult(res->first, std::move(res->second));
+ }
+
+ assert(input->rev || input->ref);
+ auto revOrRef = input->rev ? input->rev->gitRev() : *input->ref;
+
+ Attrs mutableAttrs({
+ {"type", "hg"},
+ {"name", name},
+ {"url", actualUrl},
+ {"ref", *input->ref},
+ });
+
+ if (auto res = getCache()->lookup(store, mutableAttrs)) {
+ auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1);
+ if (!rev || rev == rev2) {
+ input->rev = rev2;
+ return makeResult(res->first, std::move(res->second));
+ }
+ }
+
+ Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(Base32, false));
+
+ /* If this is a commit hash that we already have, we don't
+ have to pull again. */
+ if (!(input->rev
+ && pathExists(cacheDir)
+ && runProgram(
+ RunOptions("hg", { "log", "-R", cacheDir, "-r", input->rev->gitRev(), "--template", "1" })
+ .killStderr(true)).second == "1"))
+ {
+ Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", actualUrl));
+
+ if (pathExists(cacheDir)) {
+ try {
+ runProgram("hg", true, { "pull", "-R", cacheDir, "--", actualUrl });
+ }
+ catch (ExecError & e) {
+ string transJournal = cacheDir + "/.hg/store/journal";
+ /* hg throws "abandoned transaction" error only if this file exists */
+ if (pathExists(transJournal)) {
+ runProgram("hg", true, { "recover", "-R", cacheDir });
+ runProgram("hg", true, { "pull", "-R", cacheDir, "--", actualUrl });
+ } else {
+ throw ExecError(e.status, fmt("'hg pull' %s", statusToString(e.status)));
+ }
+ }
+ } else {
+ createDirs(dirOf(cacheDir));
+ runProgram("hg", true, { "clone", "--noupdate", "--", actualUrl, cacheDir });
+ }
+ }
+
+ auto tokens = tokenizeString<std::vector<std::string>>(
+ runProgram("hg", true, { "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
+ assert(tokens.size() == 3);
+
+ input->rev = Hash(tokens[0], htSHA1);
+ auto revCount = std::stoull(tokens[1]);
+ input->ref = tokens[2];
+
+ if (auto res = getCache()->lookup(store, getImmutableAttrs()))
+ return makeResult(res->first, std::move(res->second));
+
+ Path tmpDir = createTempDir();
+ AutoDelete delTmpDir(tmpDir, true);
+
+ runProgram("hg", true, { "archive", "-R", cacheDir, "-r", input->rev->gitRev(), tmpDir });
+
+ deletePath(tmpDir + "/.hg_archival.txt");
+
+ auto storePath = store->addToStore(name, tmpDir);
+
+ Attrs infoAttrs({
+ {"rev", input->rev->gitRev()},
+ {"revCount", (int64_t) revCount},
+ });
+
+ if (!this->rev)
+ getCache()->add(
+ store,
+ mutableAttrs,
+ infoAttrs,
+ storePath,
+ false);
+
+ getCache()->add(
+ store,
+ getImmutableAttrs(),
+ infoAttrs,
+ storePath,
+ true);
+
+ return makeResult(infoAttrs, std::move(storePath));
+ }
+};
+
+struct MercurialInputScheme : InputScheme
+{
+ std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
+ {
+ if (url.scheme != "hg+http" &&
+ url.scheme != "hg+https" &&
+ url.scheme != "hg+ssh" &&
+ url.scheme != "hg+file") return nullptr;
+
+ auto url2(url);
+ url2.scheme = std::string(url2.scheme, 3);
+ url2.query.clear();
+
+ Attrs attrs;
+ attrs.emplace("type", "hg");
+
+ for (auto &[name, value] : url.query) {
+ if (name == "rev" || name == "ref")
+ attrs.emplace(name, value);
+ else
+ url2.query.emplace(name, value);
+ }
+
+ attrs.emplace("url", url2.to_string());
+
+ return inputFromAttrs(attrs);
+ }
+
+ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
+ {
+ if (maybeGetStrAttr(attrs, "type") != "hg") return {};
+
+ for (auto & [name, value] : attrs)
+ if (name != "type" && name != "url" && name != "ref" && name != "rev")
+ throw Error("unsupported Mercurial input attribute '%s'", name);
+
+ auto input = std::make_unique<MercurialInput>(parseURL(getStrAttr(attrs, "url")));
+ if (auto ref = maybeGetStrAttr(attrs, "ref")) {
+ if (!std::regex_match(*ref, refRegex))
+ throw BadURL("invalid Mercurial branch/tag name '%s'", *ref);
+ input->ref = *ref;
+ }
+ if (auto rev = maybeGetStrAttr(attrs, "rev"))
+ input->rev = Hash(*rev, htSHA1);
+ return input;
+ }
+};
+
+static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<MercurialInputScheme>()); });
+
+}
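
Aside: for a dirty working copy, the PathFilter above decides whether to include a directory by probing the ordered set of tracked files with lower_bound. A minimal standalone sketch of that probe, using plain std::set and std::string stand-ins rather than the patch's types (not part of the patch):

    #include <cassert>
    #include <set>
    #include <string>

    static bool hasPrefix(const std::string & s, const std::string & prefix)
    {
        return s.compare(0, prefix.size(), prefix) == 0;
    }

    int main()
    {
        // Tracked files as reported by 'hg status'; std::set keeps them sorted.
        std::set<std::string> files = { "doc/manual.xml", "src/main.cc" };

        auto dirHasTrackedFile = [&](const std::string & dir) {
            auto prefix = dir + "/";
            auto i = files.lower_bound(prefix);      // first entry >= "dir/"
            return i != files.end() && hasPrefix(*i, prefix);
        };

        assert(dirHasTrackedFile("src"));    // "src/main.cc" starts with "src/"
        assert(!dirHasTrackedFile("tests")); // nothing tracked under tests/
        return 0;
    }
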
diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc
new file mode 100644
index 000000000..ba2cc192e
--- /dev/null
+++ b/src/libfetchers/path.cc
@@ -0,0 +1,148 @@
+#include "fetchers.hh"
+#include "store-api.hh"
+
+namespace nix::fetchers {
+
+struct PathInput : Input
+{
+ Path path;
+
+ /* Allow the user to pass in "fake" tree info attributes. This is
+ useful for making a pinned tree work the same as the repository
+ from which it is exported
+ (e.g. path:/nix/store/...-source?lastModified=1585388205&rev=b0c285...). */
+ std::optional<Hash> rev;
+ std::optional<uint64_t> revCount;
+ std::optional<time_t> lastModified;
+
+ std::string type() const override { return "path"; }
+
+ std::optional<Hash> getRev() const override { return rev; }
+
+ bool operator ==(const Input & other) const override
+ {
+ auto other2 = dynamic_cast<const PathInput *>(&other);
+ return
+ other2
+ && path == other2->path
+ && rev == other2->rev
+ && revCount == other2->revCount
+ && lastModified == other2->lastModified;
+ }
+
+ bool isImmutable() const override
+ {
+ return (bool) narHash;
+ }
+
+ ParsedURL toURL() const override
+ {
+ auto query = attrsToQuery(toAttrsInternal());
+ query.erase("path");
+ return ParsedURL {
+ .scheme = "path",
+ .path = path,
+ .query = query,
+ };
+ }
+
+ Attrs toAttrsInternal() const override
+ {
+ Attrs attrs;
+ attrs.emplace("path", path);
+ if (rev)
+ attrs.emplace("rev", rev->gitRev());
+ if (revCount)
+ attrs.emplace("revCount", *revCount);
+ if (lastModified)
+ attrs.emplace("lastModified", *lastModified);
+ return attrs;
+ }
+
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+ {
+ auto input = std::make_shared<PathInput>(*this);
+
+ // FIXME: check whether access to 'path' is allowed.
+
+ auto storePath = store->maybeParseStorePath(path);
+
+ if (storePath)
+ store->addTempRoot(*storePath);
+
+ if (!storePath || storePath->name() != "source" || !store->isValidPath(*storePath))
+ // FIXME: try to substitute storePath.
+ storePath = store->addToStore("source", path);
+
+ return
+ {
+ Tree {
+ .actualPath = store->toRealPath(*storePath),
+ .storePath = std::move(*storePath),
+ .info = TreeInfo {
+ .revCount = revCount,
+ .lastModified = lastModified
+ }
+ },
+ input
+ };
+ }
+
+};
+
+struct PathInputScheme : InputScheme
+{
+ std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
+ {
+ if (url.scheme != "path") return nullptr;
+
+ auto input = std::make_unique<PathInput>();
+ input->path = url.path;
+
+ for (auto & [name, value] : url.query)
+ if (name == "rev")
+ input->rev = Hash(value, htSHA1);
+ else if (name == "revCount") {
+ uint64_t revCount;
+ if (!string2Int(value, revCount))
+ throw Error("path URL '%s' has invalid parameter '%s'", url.to_string(), name);
+ input->revCount = revCount;
+ }
+ else if (name == "lastModified") {
+ time_t lastModified;
+ if (!string2Int(value, lastModified))
+ throw Error("path URL '%s' has invalid parameter '%s'", url.to_string(), name);
+ input->lastModified = lastModified;
+ }
+ else
+ throw Error("path URL '%s' has unsupported parameter '%s'", url.to_string(), name);
+
+ return input;
+ }
+
+ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
+ {
+ if (maybeGetStrAttr(attrs, "type") != "path") return {};
+
+ auto input = std::make_unique<PathInput>();
+ input->path = getStrAttr(attrs, "path");
+
+ for (auto & [name, value] : attrs)
+ if (name == "rev")
+ input->rev = Hash(getStrAttr(attrs, "rev"), htSHA1);
+ else if (name == "revCount")
+ input->revCount = getIntAttr(attrs, "revCount");
+ else if (name == "lastModified")
+ input->lastModified = getIntAttr(attrs, "lastModified");
+ else if (name == "type" || name == "path")
+ ;
+ else
+ throw Error("unsupported path input attribute '%s'", name);
+
+ return input;
+ }
+};
+
+static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<PathInputScheme>()); });
+
+}
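
Aside: revCount and lastModified above are accepted only when the entire query value parses as an integer (the string2Int checks). The same strictness can be sketched with std::from_chars; the parseUInt64 helper below is hypothetical and not part of the patch:

    #include <charconv>
    #include <cstdint>
    #include <iostream>
    #include <stdexcept>
    #include <string>

    // Accept the value only if every character is consumed, like a strict string2Int.
    static bool parseUInt64(const std::string & s, uint64_t & out)
    {
        const char * first = s.data();
        const char * last = s.data() + s.size();
        auto [ptr, ec] = std::from_chars(first, last, out);
        return ec == std::errc() && ptr == last;
    }

    int main()
    {
        uint64_t lastModified = 0;
        if (!parseUInt64("1585388205", lastModified))
            throw std::runtime_error("invalid 'lastModified' parameter");
        std::cout << "lastModified = " << lastModified << "\n";

        if (!parseUInt64("1585388205x", lastModified))
            std::cout << "trailing garbage rejected, as expected\n";
        return 0;
    }
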
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
new file mode 100644
index 000000000..695525b31
--- /dev/null
+++ b/src/libfetchers/tarball.cc
@@ -0,0 +1,275 @@
+#include "fetchers.hh"
+#include "cache.hh"
+#include "filetransfer.hh"
+#include "globals.hh"
+#include "store-api.hh"
+#include "archive.hh"
+#include "tarfile.hh"
+
+namespace nix::fetchers {
+
+DownloadFileResult downloadFile(
+ ref<Store> store,
+ const std::string & url,
+ const std::string & name,
+ bool immutable)
+{
+ // FIXME: check store
+
+ Attrs inAttrs({
+ {"type", "file"},
+ {"url", url},
+ {"name", name},
+ });
+
+ auto cached = getCache()->lookupExpired(store, inAttrs);
+
+ auto useCached = [&]() -> DownloadFileResult
+ {
+ return {
+ .storePath = std::move(cached->storePath),
+ .etag = getStrAttr(cached->infoAttrs, "etag"),
+ .effectiveUrl = getStrAttr(cached->infoAttrs, "url")
+ };
+ };
+
+ if (cached && !cached->expired)
+ return useCached();
+
+ FileTransferRequest request(url);
+ if (cached)
+ request.expectedETag = getStrAttr(cached->infoAttrs, "etag");
+ FileTransferResult res;
+ try {
+ res = getFileTransfer()->download(request);
+ } catch (FileTransferError & e) {
+ if (cached) {
+ warn("%s; using cached version", e.msg());
+ return useCached();
+ } else
+ throw;
+ }
+
+ // FIXME: write to temporary file.
+
+ Attrs infoAttrs({
+ {"etag", res.etag},
+ {"url", res.effectiveUri},
+ });
+
+ std::optional<StorePath> storePath;
+
+ if (res.cached) {
+ assert(cached);
+ assert(request.expectedETag == res.etag);
+ storePath = std::move(cached->storePath);
+ } else {
+ StringSink sink;
+ dumpString(*res.data, sink);
+ auto hash = hashString(htSHA256, *res.data);
+ ValidPathInfo info(store->makeFixedOutputPath(false, hash, name));
+ info.narHash = hashString(htSHA256, *sink.s);
+ info.narSize = sink.s->size();
+ info.ca = makeFixedOutputCA(false, hash);
+ store->addToStore(info, sink.s, NoRepair, NoCheckSigs);
+ storePath = std::move(info.path);
+ }
+
+ getCache()->add(
+ store,
+ inAttrs,
+ infoAttrs,
+ *storePath,
+ immutable);
+
+ if (url != res.effectiveUri)
+ getCache()->add(
+ store,
+ {
+ {"type", "file"},
+ {"url", res.effectiveUri},
+ {"name", name},
+ },
+ infoAttrs,
+ *storePath,
+ immutable);
+
+ return {
+ .storePath = std::move(*storePath),
+ .etag = res.etag,
+ .effectiveUrl = res.effectiveUri,
+ };
+}
+
+Tree downloadTarball(
+ ref<Store> store,
+ const std::string & url,
+ const std::string & name,
+ bool immutable)
+{
+ Attrs inAttrs({
+ {"type", "tarball"},
+ {"url", url},
+ {"name", name},
+ });
+
+ auto cached = getCache()->lookupExpired(store, inAttrs);
+
+ if (cached && !cached->expired)
+ return Tree {
+ .actualPath = store->toRealPath(cached->storePath),
+ .storePath = std::move(cached->storePath),
+ .info = TreeInfo {
+ .lastModified = getIntAttr(cached->infoAttrs, "lastModified"),
+ },
+ };
+
+ auto res = downloadFile(store, url, name, immutable);
+
+ std::optional<StorePath> unpackedStorePath;
+ time_t lastModified;
+
+ if (cached && res.etag != "" && getStrAttr(cached->infoAttrs, "etag") == res.etag) {
+ unpackedStorePath = std::move(cached->storePath);
+ lastModified = getIntAttr(cached->infoAttrs, "lastModified");
+ } else {
+ Path tmpDir = createTempDir();
+ AutoDelete autoDelete(tmpDir, true);
+ unpackTarfile(store->toRealPath(res.storePath), tmpDir);
+ auto members = readDirectory(tmpDir);
+ if (members.size() != 1)
+ throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
+ auto topDir = tmpDir + "/" + members.begin()->name;
+ lastModified = lstat(topDir).st_mtime;
+ unpackedStorePath = store->addToStore(name, topDir, true, htSHA256, defaultPathFilter, NoRepair);
+ }
+
+ Attrs infoAttrs({
+ {"lastModified", lastModified},
+ {"etag", res.etag},
+ });
+
+ getCache()->add(
+ store,
+ inAttrs,
+ infoAttrs,
+ *unpackedStorePath,
+ immutable);
+
+ return Tree {
+ .actualPath = store->toRealPath(*unpackedStorePath),
+ .storePath = std::move(*unpackedStorePath),
+ .info = TreeInfo {
+ .lastModified = lastModified,
+ },
+ };
+}
+
+struct TarballInput : Input
+{
+ ParsedURL url;
+ std::optional<Hash> hash;
+
+ TarballInput(const ParsedURL & url) : url(url)
+ { }
+
+ std::string type() const override { return "tarball"; }
+
+ bool operator ==(const Input & other) const override
+ {
+ auto other2 = dynamic_cast<const TarballInput *>(&other);
+ return
+ other2
+ && to_string() == other2->to_string()
+ && hash == other2->hash;
+ }
+
+ bool isImmutable() const override
+ {
+ return hash || narHash;
+ }
+
+ ParsedURL toURL() const override
+ {
+ auto url2(url);
+ // NAR hashes are preferred over file hashes since tar/zip files
+ // don't have a canonical representation.
+ if (narHash)
+ url2.query.insert_or_assign("narHash", narHash->to_string(SRI));
+ else if (hash)
+ url2.query.insert_or_assign("hash", hash->to_string(SRI));
+ return url2;
+ }
+
+ Attrs toAttrsInternal() const override
+ {
+ Attrs attrs;
+ attrs.emplace("url", url.to_string());
+ if (hash)
+ attrs.emplace("hash", hash->to_string(SRI));
+ return attrs;
+ }
+
+ std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+ {
+ auto tree = downloadTarball(store, url.to_string(), "source", false);
+
+ auto input = std::make_shared<TarballInput>(*this);
+ input->narHash = store->queryPathInfo(tree.storePath)->narHash;
+
+ return {std::move(tree), input};
+ }
+};
+
+struct TarballInputScheme : InputScheme
+{
+ std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
+ {
+ if (url.scheme != "file" && url.scheme != "http" && url.scheme != "https") return nullptr;
+
+ if (!hasSuffix(url.path, ".zip")
+ && !hasSuffix(url.path, ".tar")
+ && !hasSuffix(url.path, ".tar.gz")
+ && !hasSuffix(url.path, ".tar.xz")
+ && !hasSuffix(url.path, ".tar.bz2"))
+ return nullptr;
+
+ auto input = std::make_unique<TarballInput>(url);
+
+ auto hash = input->url.query.find("hash");
+ if (hash != input->url.query.end()) {
+ // FIXME: require SRI hash.
+ input->hash = Hash(hash->second);
+ input->url.query.erase(hash);
+ }
+
+ auto narHash = input->url.query.find("narHash");
+ if (narHash != input->url.query.end()) {
+ // FIXME: require SRI hash.
+ input->narHash = Hash(narHash->second);
+ input->url.query.erase(narHash);
+ }
+
+ return input;
+ }
+
+ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
+ {
+ if (maybeGetStrAttr(attrs, "type") != "tarball") return {};
+
+ for (auto & [name, value] : attrs)
+ if (name != "type" && name != "url" && name != "hash")
+ throw Error("unsupported tarball input attribute '%s'", name);
+
+ auto input = std::make_unique<TarballInput>(parseURL(getStrAttr(attrs, "url")));
+ if (auto hash = maybeGetStrAttr(attrs, "hash"))
+ // FIXME: require SRI hash.
+ input->hash = Hash(*hash);
+
+ return input;
+ }
+};
+
+static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<TarballInputScheme>()); });
+
+}
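
Aside: downloadFile() above consults the fetcher cache twice: an unexpired entry skips the transfer entirely, while an expired entry still supplies its old ETag so the server can answer "not modified". A reduced standalone sketch of that decision, with a simplified CacheEntry stand-in (not part of the patch):

    #include <iostream>
    #include <optional>
    #include <string>

    struct CacheEntry {
        std::string etag;       // ETag recorded at the last download
        std::string storePath;  // where the data already lives
        bool expired;           // past the tarball TTL?
    };

    // 'notModified' stands for the server confirming the expected ETag
    // (FileTransferResult::cached in the patch above).
    static std::string resolve(const std::optional<CacheEntry> & cached, bool notModified)
    {
        if (cached && !cached->expired)
            return cached->storePath;   // TTL not reached: no request made at all
        if (cached && notModified)
            return cached->storePath;   // expired, but the remote copy is unchanged
        return "(download and add the new data to the store)";
    }

    int main()
    {
        CacheEntry entry{"\"abc123\"", "/nix/store/example-source", true};
        std::cout << resolve(entry, true) << "\n";       // reuses the cached store path
        std::cout << resolve(std::nullopt, false) << "\n";
        return 0;
    }
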
diff --git a/src/libfetchers/tree-info.cc b/src/libfetchers/tree-info.cc
new file mode 100644
index 000000000..5788e94a1
--- /dev/null
+++ b/src/libfetchers/tree-info.cc
@@ -0,0 +1,14 @@
+#include "tree-info.hh"
+#include "store-api.hh"
+
+#include <nlohmann/json.hpp>
+
+namespace nix::fetchers {
+
+StorePath TreeInfo::computeStorePath(Store & store) const
+{
+ assert(narHash);
+ return store.makeFixedOutputPath(true, narHash, "source");
+}
+
+}
diff --git a/src/libfetchers/tree-info.hh b/src/libfetchers/tree-info.hh
new file mode 100644
index 000000000..2c7347281
--- /dev/null
+++ b/src/libfetchers/tree-info.hh
@@ -0,0 +1,29 @@
+#pragma once
+
+#include "path.hh"
+#include "hash.hh"
+
+#include <nlohmann/json_fwd.hpp>
+
+namespace nix { class Store; }
+
+namespace nix::fetchers {
+
+struct TreeInfo
+{
+ Hash narHash;
+ std::optional<uint64_t> revCount;
+ std::optional<time_t> lastModified;
+
+ bool operator ==(const TreeInfo & other) const
+ {
+ return
+ narHash == other.narHash
+ && revCount == other.revCount
+ && lastModified == other.lastModified;
+ }
+
+ StorePath computeStorePath(Store & store) const;
+};
+
+}
diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc
index 9e1d7cee6..51e199ea5 100644
--- a/src/libmain/common-args.cc
+++ b/src/libmain/common-args.cc
@@ -6,43 +6,47 @@ namespace nix {
MixCommonArgs::MixCommonArgs(const string & programName)
: programName(programName)
{
- mkFlag()
- .longName("verbose")
- .shortName('v')
- .description("increase verbosity level")
- .handler([]() { verbosity = (Verbosity) (verbosity + 1); });
-
- mkFlag()
- .longName("quiet")
- .description("decrease verbosity level")
- .handler([]() { verbosity = verbosity > lvlError ? (Verbosity) (verbosity - 1) : lvlError; });
-
- mkFlag()
- .longName("debug")
- .description("enable debug output")
- .handler([]() { verbosity = lvlDebug; });
-
- mkFlag()
- .longName("option")
- .labels({"name", "value"})
- .description("set a Nix configuration option (overriding nix.conf)")
- .arity(2)
- .handler([](std::vector<std::string> ss) {
+ addFlag({
+ .longName = "verbose",
+ .shortName = 'v',
+ .description = "increase verbosity level",
+ .handler = {[]() { verbosity = (Verbosity) (verbosity + 1); }},
+ });
+
+ addFlag({
+ .longName = "quiet",
+ .description = "decrease verbosity level",
+ .handler = {[]() { verbosity = verbosity > lvlError ? (Verbosity) (verbosity - 1) : lvlError; }},
+ });
+
+ addFlag({
+ .longName = "debug",
+ .description = "enable debug output",
+ .handler = {[]() { verbosity = lvlDebug; }},
+ });
+
+ addFlag({
+ .longName = "option",
+ .description = "set a Nix configuration option (overriding nix.conf)",
+ .labels = {"name", "value"},
+ .handler = {[](std::string name, std::string value) {
try {
- globalConfig.set(ss[0], ss[1]);
+ globalConfig.set(name, value);
} catch (UsageError & e) {
warn(e.what());
}
- });
-
- mkFlag()
- .longName("max-jobs")
- .shortName('j')
- .label("jobs")
- .description("maximum number of parallel builds")
- .handler([=](std::string s) {
+ }},
+ });
+
+ addFlag({
+ .longName = "max-jobs",
+ .shortName = 'j',
+ .description = "maximum number of parallel builds",
+ .labels = Strings{"jobs"},
+ .handler = {[=](std::string s) {
settings.set("max-jobs", s);
- });
+ }}
+ });
std::string cat = "config";
globalConfig.convertToArgs(*this, cat);
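
For illustration: the hunk above replaces the fluent mkFlag() chains with addFlag() calls that brace-initialize an aggregate via designated initializers. The sketch below shows the shape of that call style with simplified stand-ins for the Flag struct and addFlag(); it is not the real args.hh API:

    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    // Simplified stand-in for the flag aggregate in libutil.
    struct Flag {
        std::string longName;
        char shortName = 0;
        std::string description;
        std::vector<std::string> labels;
        std::function<void(std::vector<std::string>)> handler;
    };

    static void addFlag(Flag flag)
    {
        std::cout << "registered --" << flag.longName
                  << " (" << flag.description << ")\n";
        flag.handler({"sandbox", "true"});   // pretend the parser saw two arguments
    }

    int main()
    {
        addFlag({
            .longName = "option",
            .description = "set a configuration option",
            .labels = {"name", "value"},
            .handler = {[](std::vector<std::string> args) {
                std::cout << args.at(0) << " = " << args.at(1) << "\n";
            }},
        });
        return 0;
    }
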
diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc
index db1e0ba1d..b8c01c017 100644
--- a/src/libmain/shared.cc
+++ b/src/libmain/shared.cc
@@ -155,7 +155,7 @@ void initNix()
sshd). This breaks build users because they don't have access
to the TMPDIR, in particular in ‘nix-store --serve’. */
#if __APPLE__
- if (getuid() == 0 && hasPrefix(getEnv("TMPDIR").value_or("/tmp"), "/var/folders/"))
+ if (hasPrefix(getEnv("TMPDIR").value_or("/tmp"), "/var/folders/"))
unsetenv("TMPDIR");
#endif
}
@@ -165,28 +165,32 @@ LegacyArgs::LegacyArgs(const std::string & programName,
std::function<bool(Strings::iterator & arg, const Strings::iterator & end)> parseArg)
: MixCommonArgs(programName), parseArg(parseArg)
{
- mkFlag()
- .longName("no-build-output")
- .shortName('Q')
- .description("do not show build output")
- .set(&settings.verboseBuild, false);
-
- mkFlag()
- .longName("keep-failed")
- .shortName('K')
- .description("keep temporary directories of failed builds")
- .set(&(bool&) settings.keepFailed, true);
-
- mkFlag()
- .longName("keep-going")
- .shortName('k')
- .description("keep going after a build fails")
- .set(&(bool&) settings.keepGoing, true);
-
- mkFlag()
- .longName("fallback")
- .description("build from source if substitution fails")
- .set(&(bool&) settings.tryFallback, true);
+ addFlag({
+ .longName = "no-build-output",
+ .shortName = 'Q',
+ .description = "do not show build output",
+ .handler = {&settings.verboseBuild, false},
+ });
+
+ addFlag({
+ .longName = "keep-failed",
+ .shortName = 'K',
+ .description = "keep temporary directories of failed builds",
+ .handler = {&(bool&) settings.keepFailed, true},
+ });
+
+ addFlag({
+ .longName = "keep-going",
+ .shortName = 'k',
+ .description = "keep going after a build fails",
+ .handler = {&(bool&) settings.keepGoing, true},
+ });
+
+ addFlag({
+ .longName = "fallback",
+ .description = "build from source if substitution fails",
+ .handler = {&(bool&) settings.tryFallback, true},
+ });
auto intSettingAlias = [&](char shortName, const std::string & longName,
const std::string & description, const std::string & dest) {
@@ -205,11 +209,12 @@ LegacyArgs::LegacyArgs(const std::string & programName,
mkFlag(0, "no-gc-warning", "disable warning about not using '--add-root'",
&gcWarning, false);
- mkFlag()
- .longName("store")
- .label("store-uri")
- .description("URI of the Nix store to use")
- .dest(&(std::string&) settings.storeUri);
+ addFlag({
+ .longName = "store",
+ .description = "URI of the Nix store to use",
+ .labels = {"store-uri"},
+ .handler = {&(std::string&) settings.storeUri},
+ });
}
@@ -260,7 +265,10 @@ void printVersion(const string & programName)
cfg.push_back("signed-caches");
#endif
std::cout << "Features: " << concatStringsSep(", ", cfg) << "\n";
- std::cout << "Configuration file: " << settings.nixConfDir + "/nix.conf" << "\n";
+ std::cout << "System configuration file: " << settings.nixConfDir + "/nix.conf" << "\n";
+ std::cout << "User configuration files: " <<
+ concatStringsSep(":", settings.nixUserConfFiles)
+ << "\n";
std::cout << "Store directory: " << settings.nixStore << "\n";
std::cout << "State directory: " << settings.nixStateDir << "\n";
}
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index a7d6e53b0..d6f3553d9 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -7,7 +7,7 @@
#include "affinity.hh"
#include "builtins.hh"
#include "builtins/buildenv.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "finally.hh"
#include "compression.hh"
#include "json.hh"
@@ -33,7 +33,6 @@
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/utsname.h>
-#include <sys/select.h>
#include <sys/resource.h>
#include <sys/socket.h>
#include <sys/un.h>
@@ -43,6 +42,7 @@
#include <errno.h>
#include <cstring>
#include <termios.h>
+#include <poll.h>
#include <pwd.h>
#include <grp.h>
@@ -361,7 +361,7 @@ public:
{
actDerivations.progress(doneBuilds, expectedBuilds + doneBuilds, runningBuilds, failedBuilds);
actSubstitutions.progress(doneSubstitutions, expectedSubstitutions + doneSubstitutions, runningSubstitutions, failedSubstitutions);
- act.setExpected(actDownload, expectedDownloadSize + doneDownloadSize);
+ act.setExpected(actFileTransfer, expectedDownloadSize + doneDownloadSize);
act.setExpected(actCopyPath, expectedNarSize + doneNarSize);
}
};
@@ -1701,6 +1701,7 @@ void DerivationGoal::buildDone()
}
if (buildMode == bmCheck) {
+ deleteTmpDir(true);
done(BuildResult::Built);
return;
}
@@ -2187,7 +2188,7 @@ void DerivationGoal::startBuilder()
if (needsHashRewrite()) {
if (pathExists(homeDir))
- throw Error("directory '%1%' exists; please remove it", homeDir);
+ throw Error("home directory '%1%' exists; please remove it to assure purity of builds without sandboxing", homeDir);
/* We're not doing a chroot build, but we have some valid
output paths. Since we can't just overwrite or delete
@@ -2274,10 +2275,13 @@ void DerivationGoal::startBuilder()
if (chown(slaveName.c_str(), buildUser->getUID(), 0))
throw SysError("changing owner of pseudoterminal slave");
- } else {
+ }
+#if __APPLE__
+ else {
if (grantpt(builderOut.readSide.get()))
throw SysError("granting access to pseudoterminal slave");
}
+#endif
#if 0
// Mount the pt in the sandbox so that the "tty" command works.
@@ -3176,7 +3180,7 @@ void DerivationGoal::runChild()
// Only use nss functions to resolve hosts and
// services. Don’t use it for anything else that may
// be configured for this system. This limits the
- // potential impurities introduced in fixed outputs.
+ // potential impurities introduced in fixed-outputs.
writeFile(chrootRootDir + "/etc/nsswitch.conf", "hosts: files dns\nservices: files\n");
ss.push_back("/etc/services");
@@ -3561,6 +3565,29 @@ StorePathSet parseReferenceSpecifiers(Store & store, const BasicDerivation & drv
}
+static void moveCheckToStore(const Path & src, const Path & dst)
+{
+ /* For the rename of directory to succeed, we must be running as root or
+ the directory must be made temporarily writable (to update the
+ directory's parent link ".."). */
+ struct stat st;
+ if (lstat(src.c_str(), &st) == -1) {
+ throw SysError(format("getting attributes of path '%1%'") % src);
+ }
+
+ bool changePerm = (geteuid() && S_ISDIR(st.st_mode) && !(st.st_mode & S_IWUSR));
+
+ if (changePerm)
+ chmod_(src, st.st_mode | S_IWUSR);
+
+ if (rename(src.c_str(), dst.c_str()))
+ throw SysError(format("renaming '%1%' to '%2%'") % src % dst);
+
+ if (changePerm)
+ chmod_(dst, st.st_mode);
+}
+
+
void DerivationGoal::registerOutputs()
{
/* When using a build hook, the build hook can register the output
@@ -3683,7 +3710,8 @@ void DerivationGoal::registerOutputs()
/* The output path should be a regular file without execute permission. */
if (!S_ISREG(st.st_mode) || (st.st_mode & S_IXUSR) != 0)
throw BuildError(
- "output path '%1%' should be a non-executable regular file",
+ "output path '%1%' should be a non-executable regular file "
+ "since recursive hashing is not enabled (outputHashMode=flat)",
path);
}
@@ -3744,8 +3772,7 @@ void DerivationGoal::registerOutputs()
if (settings.runDiffHook || settings.keepFailed) {
Path dst = worker.store.toRealPath(path + checkSuffix);
deletePath(dst);
- if (rename(actualPath.c_str(), dst.c_str()))
- throw SysError("renaming '%1%' to '%2%'", actualPath, dst);
+ moveCheckToStore(actualPath, dst);
handleDiffHook(
buildUser ? buildUser->getUID() : getuid(),
@@ -3753,10 +3780,10 @@ void DerivationGoal::registerOutputs()
path, dst, worker.store.printStorePath(drvPath), tmpDir);
throw NotDeterministic("derivation '%s' may not be deterministic: output '%s' differs from '%s'",
- worker.store.printStorePath(drvPath), path, dst);
+ worker.store.printStorePath(drvPath), worker.store.toRealPath(path), dst);
} else
throw NotDeterministic("derivation '%s' may not be deterministic: output '%s' differs",
- worker.store.printStorePath(drvPath), path);
+ worker.store.printStorePath(drvPath), worker.store.toRealPath(path));
}
/* Since we verified the build, it's now ultimately trusted. */
@@ -4808,8 +4835,7 @@ void Worker::waitForInput()
terminated. */
bool useTimeout = false;
- struct timeval timeout;
- timeout.tv_usec = 0;
+ long timeout = 0;
auto before = steady_time_point::clock::now();
/* If we're monitoring for silence on stdout/stderr, or if there
@@ -4827,7 +4853,7 @@ void Worker::waitForInput()
nearest = std::min(nearest, i.timeStarted + std::chrono::seconds(settings.buildTimeout));
}
if (nearest != steady_time_point::max()) {
- timeout.tv_sec = std::max(1L, (long) std::chrono::duration_cast<std::chrono::seconds>(nearest - before).count());
+ timeout = std::max(1L, (long) std::chrono::duration_cast<std::chrono::seconds>(nearest - before).count());
useTimeout = true;
}
@@ -4838,30 +4864,28 @@ void Worker::waitForInput()
if (lastWokenUp == steady_time_point::min())
printError("waiting for locks or build slots...");
if (lastWokenUp == steady_time_point::min() || lastWokenUp > before) lastWokenUp = before;
- timeout.tv_sec = std::max(1L,
+ timeout = std::max(1L,
(long) std::chrono::duration_cast<std::chrono::seconds>(
lastWokenUp + std::chrono::seconds(settings.pollInterval) - before).count());
} else lastWokenUp = steady_time_point::min();
if (useTimeout)
- vomit("sleeping %d seconds", timeout.tv_sec);
+ vomit("sleeping %d seconds", timeout);
/* Use select() to wait for the input side of any logger pipe to
become `available'. Note that `available' (i.e., non-blocking)
includes EOF. */
- fd_set fds;
- FD_ZERO(&fds);
- int fdMax = 0;
+ std::vector<struct pollfd> pollStatus;
+ std::map <int, int> fdToPollStatus;
for (auto & i : children) {
for (auto & j : i.fds) {
- if (j >= FD_SETSIZE)
- throw Error("reached FD_SETSIZE limit");
- FD_SET(j, &fds);
- if (j >= fdMax) fdMax = j + 1;
+ pollStatus.push_back((struct pollfd) { .fd = j, .events = POLLIN });
+ fdToPollStatus[j] = pollStatus.size() - 1;
}
}
- if (select(fdMax, &fds, 0, 0, useTimeout ? &timeout : 0) == -1) {
+ if (poll(pollStatus.data(), pollStatus.size(),
+ useTimeout ? timeout * 1000 : -1) == -1) {
if (errno == EINTR) return;
throw SysError("waiting for input");
}
@@ -4882,7 +4906,7 @@ void Worker::waitForInput()
set<int> fds2(j->fds);
std::vector<unsigned char> buffer(4096);
for (auto & k : fds2) {
- if (FD_ISSET(k, &fds)) {
+ if (pollStatus.at(fdToPollStatus.at(k)).revents) {
ssize_t rd = read(k, buffer.data(), buffer.size());
// FIXME: is there a cleaner way to handle pt close
// than EIO? Is this even standard?
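
Aside: the waitForInput() hunk switches from select() to poll(), dropping the FD_SETSIZE ceiling and taking its timeout in milliseconds. A standalone sketch of the same poll() pattern, watching a single self-made pipe instead of the builders' logger pipes (not part of the patch):

    #include <poll.h>
    #include <unistd.h>
    #include <cstdio>
    #include <map>
    #include <vector>

    int main()
    {
        int fds[2];
        if (pipe(fds) != 0) { perror("pipe"); return 1; }
        (void) write(fds[1], "x", 1);                 // make the read end readable

        std::vector<struct pollfd> pollStatus;
        std::map<int, size_t> fdToPollStatus;         // fd -> index into pollStatus
        pollStatus.push_back(pollfd { .fd = fds[0], .events = POLLIN });
        fdToPollStatus[fds[0]] = pollStatus.size() - 1;

        long timeoutSeconds = 5;                      // poll() wants milliseconds; -1 = forever
        int n = poll(pollStatus.data(), pollStatus.size(), timeoutSeconds * 1000);
        if (n == -1) { perror("poll"); return 1; }

        if (pollStatus.at(fdToPollStatus.at(fds[0])).revents & POLLIN)
            printf("fd %d is readable\n", fds[0]);

        close(fds[0]);
        close(fds[1]);
        return 0;
    }
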
diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc
index 223ed1fba..2048f8f87 100644
--- a/src/libstore/builtins/fetchurl.cc
+++ b/src/libstore/builtins/fetchurl.cc
@@ -1,5 +1,5 @@
#include "builtins.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "store-api.hh"
#include "archive.hh"
#include "compression.hh"
@@ -26,9 +26,9 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
auto mainUrl = getAttr("url");
bool unpack = get(drv.env, "unpack").value_or("") == "1";
- /* Note: have to use a fresh downloader here because we're in
+ /* Note: have to use a fresh fileTransfer here because we're in
a forked process. */
- auto downloader = makeDownloader();
+ auto fileTransfer = makeFileTransfer();
auto fetch = [&](const std::string & url) {
@@ -36,13 +36,13 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
/* No need to do TLS verification, because we check the hash of
the result anyway. */
- DownloadRequest request(url);
+ FileTransferRequest request(url);
request.verifyTLS = false;
request.decompress = false;
auto decompressor = makeDecompressionSink(
unpack && hasSuffix(mainUrl, ".xz") ? "xz" : "none", sink);
- downloader->download(std::move(request), *decompressor);
+ fileTransfer->download(std::move(request), *decompressor);
decompressor->finish();
});
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index f35b8cc43..852667468 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -378,7 +378,7 @@ Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutput
if (h == drvHashes.end()) {
assert(store.isValidPath(i.first));
h = drvHashes.insert_or_assign(i.first.clone(), hashDerivationModulo(store,
- readDerivation(store, store.toRealPath(store.printStorePath(i.first))), false)).first;
+ readDerivation(store, store.toRealPath(i.first)), false)).first;
}
inputs2.insert_or_assign(h->second.to_string(Base16, false), i.second);
}
diff --git a/src/libstore/download.cc b/src/libstore/filetransfer.cc
index 60c6a80f3..3285edded 100644
--- a/src/libstore/download.cc
+++ b/src/libstore/filetransfer.cc
@@ -1,14 +1,10 @@
-#include "download.hh"
+#include "filetransfer.hh"
#include "util.hh"
#include "globals.hh"
-#include "hash.hh"
#include "store-api.hh"
-#include "archive.hh"
#include "s3.hh"
#include "compression.hh"
-#include "pathlocks.hh"
#include "finally.hh"
-#include "tarfile.hh"
#ifdef ENABLE_S3
#include <aws/core/client/ClientConfiguration.h>
@@ -31,13 +27,9 @@ using namespace std::string_literals;
namespace nix {
-DownloadSettings downloadSettings;
+FileTransferSettings fileTransferSettings;
-static GlobalConfig::Register r1(&downloadSettings);
-
-CachedDownloadRequest::CachedDownloadRequest(const std::string & uri)
- : uri(uri), ttl(settings.tarballTtl)
-{ }
+static GlobalConfig::Register r1(&fileTransferSettings);
std::string resolveUri(const std::string & uri)
{
@@ -47,21 +39,21 @@ std::string resolveUri(const std::string & uri)
return uri;
}
-struct CurlDownloader : public Downloader
+struct curlFileTransfer : public FileTransfer
{
CURLM * curlm = 0;
std::random_device rd;
std::mt19937 mt19937;
- struct DownloadItem : public std::enable_shared_from_this<DownloadItem>
+ struct TransferItem : public std::enable_shared_from_this<TransferItem>
{
- CurlDownloader & downloader;
- DownloadRequest request;
- DownloadResult result;
+ curlFileTransfer & fileTransfer;
+ FileTransferRequest request;
+ FileTransferResult result;
Activity act;
bool done = false; // whether either the success or failure function has been called
- Callback<DownloadResult> callback;
+ Callback<FileTransferResult> callback;
CURL * req = 0;
bool active = false; // whether the handle has been added to the multi object
std::string status;
@@ -80,19 +72,26 @@ struct CurlDownloader : public Downloader
curl_off_t writtenToSink = 0;
- DownloadItem(CurlDownloader & downloader,
- const DownloadRequest & request,
- Callback<DownloadResult> && callback)
- : downloader(downloader)
+ TransferItem(curlFileTransfer & fileTransfer,
+ const FileTransferRequest & request,
+ Callback<FileTransferResult> && callback)
+ : fileTransfer(fileTransfer)
, request(request)
- , act(*logger, lvlTalkative, actDownload,
+ , act(*logger, lvlTalkative, actFileTransfer,
fmt(request.data ? "uploading '%s'" : "downloading '%s'", request.uri),
{request.uri}, request.parentAct)
, callback(std::move(callback))
, finalSink([this](const unsigned char * data, size_t len) {
if (this->request.dataCallback) {
- writtenToSink += len;
- this->request.dataCallback((char *) data, len);
+ long httpStatus = 0;
+ curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
+
+ /* Only write data to the sink if this is a
+ successful response. */
+ if (httpStatus == 0 || httpStatus == 200 || httpStatus == 201 || httpStatus == 206) {
+ writtenToSink += len;
+ this->request.dataCallback((char *) data, len);
+ }
} else
this->result.data->append((char *) data, len);
})
@@ -103,17 +102,17 @@ struct CurlDownloader : public Downloader
requestHeaders = curl_slist_append(requestHeaders, ("Content-Type: " + request.mimeType).c_str());
}
- ~DownloadItem()
+ ~TransferItem()
{
if (req) {
if (active)
- curl_multi_remove_handle(downloader.curlm, req);
+ curl_multi_remove_handle(fileTransfer.curlm, req);
curl_easy_cleanup(req);
}
if (requestHeaders) curl_slist_free_all(requestHeaders);
try {
if (!done)
- fail(DownloadError(Interrupted, "download of '%s' was interrupted", request.uri));
+ fail(FileTransferError(Interrupted, "download of '%s' was interrupted", request.uri));
} catch (...) {
ignoreException();
}
@@ -157,7 +156,7 @@ struct CurlDownloader : public Downloader
static size_t writeCallbackWrapper(void * contents, size_t size, size_t nmemb, void * userp)
{
- return ((DownloadItem *) userp)->writeCallback(contents, size, nmemb);
+ return ((TransferItem *) userp)->writeCallback(contents, size, nmemb);
}
size_t headerCallback(void * contents, size_t size, size_t nmemb)
@@ -199,7 +198,7 @@ struct CurlDownloader : public Downloader
static size_t headerCallbackWrapper(void * contents, size_t size, size_t nmemb, void * userp)
{
- return ((DownloadItem *) userp)->headerCallback(contents, size, nmemb);
+ return ((TransferItem *) userp)->headerCallback(contents, size, nmemb);
}
int progressCallback(double dltotal, double dlnow)
@@ -214,7 +213,7 @@ struct CurlDownloader : public Downloader
static int progressCallbackWrapper(void * userp, double dltotal, double dlnow, double ultotal, double ulnow)
{
- return ((DownloadItem *) userp)->progressCallback(dltotal, dlnow);
+ return ((TransferItem *) userp)->progressCallback(dltotal, dlnow);
}
static int debugCallback(CURL * handle, curl_infotype type, char * data, size_t size, void * userptr)
@@ -238,7 +237,7 @@ struct CurlDownloader : public Downloader
static size_t readCallbackWrapper(char *buffer, size_t size, size_t nitems, void * userp)
{
- return ((DownloadItem *) userp)->readCallback(buffer, size, nitems);
+ return ((TransferItem *) userp)->readCallback(buffer, size, nitems);
}
void init()
@@ -249,7 +248,7 @@ struct CurlDownloader : public Downloader
if (verbosity >= lvlVomit) {
curl_easy_setopt(req, CURLOPT_VERBOSE, 1);
- curl_easy_setopt(req, CURLOPT_DEBUGFUNCTION, DownloadItem::debugCallback);
+ curl_easy_setopt(req, CURLOPT_DEBUGFUNCTION, TransferItem::debugCallback);
}
curl_easy_setopt(req, CURLOPT_URL, request.uri.c_str());
@@ -258,19 +257,19 @@ struct CurlDownloader : public Downloader
curl_easy_setopt(req, CURLOPT_NOSIGNAL, 1);
curl_easy_setopt(req, CURLOPT_USERAGENT,
("curl/" LIBCURL_VERSION " Nix/" + nixVersion +
- (downloadSettings.userAgentSuffix != "" ? " " + downloadSettings.userAgentSuffix.get() : "")).c_str());
+ (fileTransferSettings.userAgentSuffix != "" ? " " + fileTransferSettings.userAgentSuffix.get() : "")).c_str());
#if LIBCURL_VERSION_NUM >= 0x072b00
curl_easy_setopt(req, CURLOPT_PIPEWAIT, 1);
#endif
#if LIBCURL_VERSION_NUM >= 0x072f00
- if (downloadSettings.enableHttp2)
+ if (fileTransferSettings.enableHttp2)
curl_easy_setopt(req, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2TLS);
else
curl_easy_setopt(req, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_1);
#endif
- curl_easy_setopt(req, CURLOPT_WRITEFUNCTION, DownloadItem::writeCallbackWrapper);
+ curl_easy_setopt(req, CURLOPT_WRITEFUNCTION, TransferItem::writeCallbackWrapper);
curl_easy_setopt(req, CURLOPT_WRITEDATA, this);
- curl_easy_setopt(req, CURLOPT_HEADERFUNCTION, DownloadItem::headerCallbackWrapper);
+ curl_easy_setopt(req, CURLOPT_HEADERFUNCTION, TransferItem::headerCallbackWrapper);
curl_easy_setopt(req, CURLOPT_HEADERDATA, this);
curl_easy_setopt(req, CURLOPT_PROGRESSFUNCTION, progressCallbackWrapper);
@@ -298,10 +297,10 @@ struct CurlDownloader : public Downloader
curl_easy_setopt(req, CURLOPT_SSL_VERIFYHOST, 0);
}
- curl_easy_setopt(req, CURLOPT_CONNECTTIMEOUT, downloadSettings.connectTimeout.get());
+ curl_easy_setopt(req, CURLOPT_CONNECTTIMEOUT, fileTransferSettings.connectTimeout.get());
curl_easy_setopt(req, CURLOPT_LOW_SPEED_LIMIT, 1L);
- curl_easy_setopt(req, CURLOPT_LOW_SPEED_TIME, downloadSettings.stalledDownloadTimeout.get());
+ curl_easy_setopt(req, CURLOPT_LOW_SPEED_TIME, fileTransferSettings.stalledDownloadTimeout.get());
/* If no file exist in the specified path, curl continues to work
anyway as if netrc support was disabled. */
@@ -402,14 +401,14 @@ struct CurlDownloader : public Downloader
auto exc =
code == CURLE_ABORTED_BY_CALLBACK && _isInterrupted
- ? DownloadError(Interrupted, fmt("%s of '%s' was interrupted", request.verb(), request.uri))
+ ? FileTransferError(Interrupted, fmt("%s of '%s' was interrupted", request.verb(), request.uri))
: httpStatus != 0
- ? DownloadError(err,
+ ? FileTransferError(err,
fmt("unable to %s '%s': HTTP error %d",
request.verb(), request.uri, httpStatus)
+ (code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code)))
)
- : DownloadError(err,
+ : FileTransferError(err,
fmt("unable to %s '%s': %s (%d)",
request.verb(), request.uri, curl_easy_strerror(code), code));
@@ -423,13 +422,13 @@ struct CurlDownloader : public Downloader
|| writtenToSink == 0
|| (acceptRanges && encoding.empty())))
{
- int ms = request.baseRetryTimeMs * std::pow(2.0f, attempt - 1 + std::uniform_real_distribution<>(0.0, 0.5)(downloader.mt19937));
+ int ms = request.baseRetryTimeMs * std::pow(2.0f, attempt - 1 + std::uniform_real_distribution<>(0.0, 0.5)(fileTransfer.mt19937));
if (writtenToSink)
warn("%s; retrying from offset %d in %d ms", exc.what(), writtenToSink, ms);
else
warn("%s; retrying in %d ms", exc.what(), ms);
embargo = std::chrono::steady_clock::now() + std::chrono::milliseconds(ms);
- downloader.enqueueItem(shared_from_this());
+ fileTransfer.enqueueItem(shared_from_this());
}
else
fail(exc);
@@ -440,12 +439,12 @@ struct CurlDownloader : public Downloader
struct State
{
struct EmbargoComparator {
- bool operator() (const std::shared_ptr<DownloadItem> & i1, const std::shared_ptr<DownloadItem> & i2) {
+ bool operator() (const std::shared_ptr<TransferItem> & i1, const std::shared_ptr<TransferItem> & i2) {
return i1->embargo > i2->embargo;
}
};
bool quit = false;
- std::priority_queue<std::shared_ptr<DownloadItem>, std::vector<std::shared_ptr<DownloadItem>>, EmbargoComparator> incoming;
+ std::priority_queue<std::shared_ptr<TransferItem>, std::vector<std::shared_ptr<TransferItem>>, EmbargoComparator> incoming;
};
Sync<State> state_;
@@ -457,7 +456,7 @@ struct CurlDownloader : public Downloader
std::thread workerThread;
- CurlDownloader()
+ curlFileTransfer()
: mt19937(rd())
{
static std::once_flag globalInit;
@@ -470,7 +469,7 @@ struct CurlDownloader : public Downloader
#endif
#if LIBCURL_VERSION_NUM >= 0x071e00 // Max connections requires >= 7.30.0
curl_multi_setopt(curlm, CURLMOPT_MAX_TOTAL_CONNECTIONS,
- downloadSettings.httpConnections.get());
+ fileTransferSettings.httpConnections.get());
#endif
wakeupPipe.create();
@@ -479,7 +478,7 @@ struct CurlDownloader : public Downloader
workerThread = std::thread([&]() { workerThreadEntry(); });
}
- ~CurlDownloader()
+ ~curlFileTransfer()
{
stopWorkerThread();
@@ -505,7 +504,7 @@ struct CurlDownloader : public Downloader
stopWorkerThread();
});
- std::map<CURL *, std::shared_ptr<DownloadItem>> items;
+ std::map<CURL *, std::shared_ptr<TransferItem>> items;
bool quit = false;
@@ -562,7 +561,7 @@ struct CurlDownloader : public Downloader
throw SysError("reading curl wakeup socket");
}
- std::vector<std::shared_ptr<DownloadItem>> incoming;
+ std::vector<std::shared_ptr<TransferItem>> incoming;
auto now = std::chrono::steady_clock::now();
{
@@ -615,7 +614,7 @@ struct CurlDownloader : public Downloader
}
}
- void enqueueItem(std::shared_ptr<DownloadItem> item)
+ void enqueueItem(std::shared_ptr<TransferItem> item)
{
if (item->request.data
&& !hasPrefix(item->request.uri, "http://")
@@ -647,8 +646,8 @@ struct CurlDownloader : public Downloader
}
#endif
- void enqueueDownload(const DownloadRequest & request,
- Callback<DownloadResult> callback) override
+ void enqueueFileTransfer(const FileTransferRequest & request,
+ Callback<FileTransferResult> callback) override
{
/* Ugly hack to support s3:// URIs. */
if (hasPrefix(request.uri, "s3://")) {
@@ -666,9 +665,9 @@ struct CurlDownloader : public Downloader
// FIXME: implement ETag
auto s3Res = s3Helper.getObject(bucketName, key);
- DownloadResult res;
+ FileTransferResult res;
if (!s3Res.data)
- throw DownloadError(NotFound, fmt("S3 object '%s' does not exist", request.uri));
+ throw FileTransferError(NotFound, fmt("S3 object '%s' does not exist", request.uri));
res.data = s3Res.data;
callback(std::move(res));
#else
@@ -678,26 +677,26 @@ struct CurlDownloader : public Downloader
return;
}
- enqueueItem(std::make_shared<DownloadItem>(*this, request, std::move(callback)));
+ enqueueItem(std::make_shared<TransferItem>(*this, request, std::move(callback)));
}
};
-ref<Downloader> getDownloader()
+ref<FileTransfer> getFileTransfer()
{
- static ref<Downloader> downloader = makeDownloader();
- return downloader;
+ static ref<FileTransfer> fileTransfer = makeFileTransfer();
+ return fileTransfer;
}
-ref<Downloader> makeDownloader()
+ref<FileTransfer> makeFileTransfer()
{
- return make_ref<CurlDownloader>();
+ return make_ref<curlFileTransfer>();
}
-std::future<DownloadResult> Downloader::enqueueDownload(const DownloadRequest & request)
+std::future<FileTransferResult> FileTransfer::enqueueFileTransfer(const FileTransferRequest & request)
{
- auto promise = std::make_shared<std::promise<DownloadResult>>();
- enqueueDownload(request,
- {[promise](std::future<DownloadResult> fut) {
+ auto promise = std::make_shared<std::promise<FileTransferResult>>();
+ enqueueFileTransfer(request,
+ {[promise](std::future<FileTransferResult> fut) {
try {
promise->set_value(fut.get());
} catch (...) {
@@ -707,15 +706,21 @@ std::future<DownloadResult> Downloader::enqueueDownload(const DownloadRequest &
return promise->get_future();
}
-DownloadResult Downloader::download(const DownloadRequest & request)
+FileTransferResult FileTransfer::download(const FileTransferRequest & request)
+{
+ return enqueueFileTransfer(request).get();
+}
+
+FileTransferResult FileTransfer::upload(const FileTransferRequest & request)
{
- return enqueueDownload(request).get();
+ /* Note: this method is the same as download, but helps in readability */
+ return enqueueFileTransfer(request).get();
}
-void Downloader::download(DownloadRequest && request, Sink & sink)
+void FileTransfer::download(FileTransferRequest && request, Sink & sink)
{
/* Note: we can't call 'sink' via request.dataCallback, because
- that would cause the sink to execute on the downloader
+ that would cause the sink to execute on the fileTransfer
thread. If 'sink' is a coroutine, this will fail. Also, if the
sink is expensive (e.g. one that does decompression and writing
to the Nix store), it would stall the download thread too much.
@@ -761,8 +766,8 @@ void Downloader::download(DownloadRequest && request, Sink & sink)
state->avail.notify_one();
};
- enqueueDownload(request,
- {[_state](std::future<DownloadResult> fut) {
+ enqueueFileTransfer(request,
+ {[_state](std::future<FileTransferResult> fut) {
auto state(_state->lock());
state->quit = true;
try {
@@ -807,140 +812,6 @@ void Downloader::download(DownloadRequest && request, Sink & sink)
}
}
-CachedDownloadResult Downloader::downloadCached(
- ref<Store> store, const CachedDownloadRequest & request)
-{
- auto url = resolveUri(request.uri);
-
- auto name = request.name;
- if (name == "") {
- auto p = url.rfind('/');
- if (p != string::npos) name = string(url, p + 1);
- }
-
- std::optional<StorePath> expectedStorePath;
- if (request.expectedHash) {
- expectedStorePath = store->makeFixedOutputPath(request.unpack, request.expectedHash, name);
- if (store->isValidPath(*expectedStorePath)) {
- CachedDownloadResult result;
- result.storePath = store->printStorePath(*expectedStorePath);
- result.path = store->toRealPath(result.storePath);
- return result;
- }
- }
-
- Path cacheDir = getCacheDir() + "/nix/tarballs";
- createDirs(cacheDir);
-
- string urlHash = hashString(htSHA256, name + std::string("\0"s) + url).to_string(Base32, false);
-
- Path dataFile = cacheDir + "/" + urlHash + ".info";
- Path fileLink = cacheDir + "/" + urlHash + "-file";
-
- PathLocks lock({fileLink}, fmt("waiting for lock on '%1%'...", fileLink));
-
- std::optional<StorePath> storePath;
-
- string expectedETag;
-
- bool skip = false;
-
- CachedDownloadResult result;
-
- if (pathExists(fileLink) && pathExists(dataFile)) {
- storePath = store->parseStorePath(readLink(fileLink));
- // FIXME
- store->addTempRoot(*storePath);
- if (store->isValidPath(*storePath)) {
- auto ss = tokenizeString<vector<string>>(readFile(dataFile), "\n");
- if (ss.size() >= 3 && ss[0] == url) {
- time_t lastChecked;
- if (string2Int(ss[2], lastChecked) && (uint64_t) lastChecked + request.ttl >= (uint64_t) time(0)) {
- skip = true;
- result.effectiveUri = request.uri;
- result.etag = ss[1];
- } else if (!ss[1].empty()) {
- debug(format("verifying previous ETag '%1%'") % ss[1]);
- expectedETag = ss[1];
- }
- }
- } else
- storePath.reset();
- }
-
- if (!skip) {
-
- try {
- DownloadRequest request2(url);
- request2.expectedETag = expectedETag;
- auto res = download(request2);
- result.effectiveUri = res.effectiveUri;
- result.etag = res.etag;
-
- if (!res.cached) {
- StringSink sink;
- dumpString(*res.data, sink);
- Hash hash = hashString(request.expectedHash ? request.expectedHash.type : htSHA256, *res.data);
- ValidPathInfo info(store->makeFixedOutputPath(false, hash, name));
- info.narHash = hashString(htSHA256, *sink.s);
- info.narSize = sink.s->size();
- info.ca = makeFixedOutputCA(false, hash);
- store->addToStore(info, sink.s, NoRepair, NoCheckSigs);
- storePath = info.path.clone();
- }
-
- assert(storePath);
- replaceSymlink(store->printStorePath(*storePath), fileLink);
-
- writeFile(dataFile, url + "\n" + res.etag + "\n" + std::to_string(time(0)) + "\n");
- } catch (DownloadError & e) {
- if (!storePath) throw;
- warn("warning: %s; using cached result", e.msg());
- result.etag = expectedETag;
- }
- }
-
- if (request.unpack) {
- Path unpackedLink = cacheDir + "/" + ((std::string) storePath->to_string()) + "-unpacked";
- PathLocks lock2({unpackedLink}, fmt("waiting for lock on '%1%'...", unpackedLink));
- std::optional<StorePath> unpackedStorePath;
- if (pathExists(unpackedLink)) {
- unpackedStorePath = store->parseStorePath(readLink(unpackedLink));
- // FIXME
- store->addTempRoot(*unpackedStorePath);
- if (!store->isValidPath(*unpackedStorePath))
- unpackedStorePath.reset();
- }
- if (!unpackedStorePath) {
- printInfo("unpacking '%s'...", url);
- Path tmpDir = createTempDir();
- AutoDelete autoDelete(tmpDir, true);
- unpackTarfile(store->toRealPath(store->printStorePath(*storePath)), tmpDir);
- auto members = readDirectory(tmpDir);
- if (members.size() != 1)
- throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
- auto topDir = tmpDir + "/" + members.begin()->name;
- unpackedStorePath = store->addToStore(name, topDir, true, htSHA256, defaultPathFilter, NoRepair);
- }
- replaceSymlink(store->printStorePath(*unpackedStorePath), unpackedLink);
- storePath = std::move(*unpackedStorePath);
- }
-
- if (expectedStorePath && *storePath != *expectedStorePath) {
- unsigned int statusCode = 102;
- Hash gotHash = request.unpack
- ? hashPath(request.expectedHash.type, store->toRealPath(store->printStorePath(*storePath))).first
- : hashFile(request.expectedHash.type, store->toRealPath(store->printStorePath(*storePath)));
- throw nix::Error(statusCode, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
- url, request.expectedHash.to_string(), gotHash.to_string());
- }
-
- result.storePath = store->printStorePath(*storePath);
- result.path = store->toRealPath(result.storePath);
- return result;
-}
-
-
bool isUri(const string & s)
{
if (s.compare(0, 8, "channel:") == 0) return true;
diff --git a/src/libstore/download.hh b/src/libstore/filetransfer.hh
index 8141928d2..517b1a7d3 100644
--- a/src/libstore/download.hh
+++ b/src/libstore/filetransfer.hh
@@ -9,7 +9,7 @@
namespace nix {
-struct DownloadSettings : Config
+struct FileTransferSettings : Config
{
Setting<bool> enableHttp2{this, true, "http2",
"Whether to enable HTTP/2 support."};
@@ -31,15 +31,15 @@ struct DownloadSettings : Config
"How often Nix will attempt to download a file before giving up."};
};
-extern DownloadSettings downloadSettings;
+extern FileTransferSettings fileTransferSettings;
-struct DownloadRequest
+struct FileTransferRequest
{
std::string uri;
std::string expectedETag;
bool verifyTLS = true;
bool head = false;
- size_t tries = downloadSettings.tries;
+ size_t tries = fileTransferSettings.tries;
unsigned int baseRetryTimeMs = 250;
ActivityId parentAct;
bool decompress = true;
@@ -47,7 +47,7 @@ struct DownloadRequest
std::string mimeType;
std::function<void(char *, size_t)> dataCallback;
- DownloadRequest(const std::string & uri)
+ FileTransferRequest(const std::string & uri)
: uri(uri), parentAct(getCurActivity()) { }
std::string verb()
@@ -56,7 +56,7 @@ struct DownloadRequest
}
};
-struct DownloadResult
+struct FileTransferResult
{
bool cached = false;
std::string etag;
@@ -65,76 +65,52 @@ struct DownloadResult
uint64_t bodySize = 0;
};
-struct CachedDownloadRequest
-{
- std::string uri;
- bool unpack = false;
- std::string name;
- Hash expectedHash;
- unsigned int ttl;
-
- CachedDownloadRequest(const std::string & uri);
- CachedDownloadRequest() = delete;
-};
-
-struct CachedDownloadResult
-{
- // Note: 'storePath' may be different from 'path' when using a
- // chroot store.
- Path storePath;
- Path path;
- std::optional<std::string> etag;
- std::string effectiveUri;
-};
-
class Store;
-struct Downloader
+struct FileTransfer
{
- virtual ~Downloader() { }
+ virtual ~FileTransfer() { }
- /* Enqueue a download request, returning a future to the result of
- the download. The future may throw a DownloadError
+ /* Enqueue a data transfer request, returning a future to the result of
+ the download. The future may throw a FileTransferError
exception. */
- virtual void enqueueDownload(const DownloadRequest & request,
- Callback<DownloadResult> callback) = 0;
+ virtual void enqueueFileTransfer(const FileTransferRequest & request,
+ Callback<FileTransferResult> callback) = 0;
- std::future<DownloadResult> enqueueDownload(const DownloadRequest & request);
+ std::future<FileTransferResult> enqueueFileTransfer(const FileTransferRequest & request);
/* Synchronously download a file. */
- DownloadResult download(const DownloadRequest & request);
+ FileTransferResult download(const FileTransferRequest & request);
+
+ /* Synchronously upload a file. */
+ FileTransferResult upload(const FileTransferRequest & request);
/* Download a file, writing its data to a sink. The sink will be
invoked on the thread of the caller. */
- void download(DownloadRequest && request, Sink & sink);
-
- /* Check if the specified file is already in ~/.cache/nix/tarballs
- and is more recent than ‘tarball-ttl’ seconds. Otherwise,
- use the recorded ETag to verify if the server has a more
- recent version, and if so, download it to the Nix store. */
- CachedDownloadResult downloadCached(ref<Store> store, const CachedDownloadRequest & request);
+ void download(FileTransferRequest && request, Sink & sink);
enum Error { NotFound, Forbidden, Misc, Transient, Interrupted };
};
-/* Return a shared Downloader object. Using this object is preferred
+/* Return a shared FileTransfer object. Using this object is preferred
because it enables connection reuse and HTTP/2 multiplexing. */
-ref<Downloader> getDownloader();
+ref<FileTransfer> getFileTransfer();
-/* Return a new Downloader object. */
-ref<Downloader> makeDownloader();
+/* Return a new FileTransfer object. */
+ref<FileTransfer> makeFileTransfer();
-class DownloadError : public Error
+class FileTransferError : public Error
{
public:
- Downloader::Error error;
-
- template<typename... Args>
- DownloadError(Downloader::Error error, const Args & ... args)
- : Error(args...), error(error)
+ FileTransfer::Error error;
+
+ template<typename... Args>
+ FileTransferError(FileTransfer::Error error, const Args & ... args)
+ : Error(args...), error(error)
{ }
};
bool isUri(const string & s);
+/* Resolve deprecated 'channel:<foo>' URLs. */
+std::string resolveUri(const std::string & uri);
+
}
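
Aside: a caller-side sketch of the renamed interface, following the pattern used by downloadFile() in tarball.cc earlier in this patch. The URL and ETag are invented, and the snippet assumes the declarations above, so it only builds inside the Nix tree:

    #include "filetransfer.hh"

    #include <iostream>

    using namespace nix;

    void fetchExample()
    {
        FileTransferRequest request("https://example.org/source.tar.gz");
        request.expectedETag = "\"deadbeef\"";   // lets the server reply "not modified"

        try {
            FileTransferResult res = getFileTransfer()->download(request);
            if (res.cached)
                std::cerr << "unchanged since ETag " << res.etag << "\n";
            else
                std::cerr << "fetched " << res.data->size() << " bytes from "
                          << res.effectiveUri << "\n";
        } catch (FileTransferError & e) {
            if (e.error == FileTransfer::NotFound)
                std::cerr << "no such file\n";
            else
                throw;
        }
    }
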
diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc
index 629c4360c..e99423257 100644
--- a/src/libstore/gc.cc
+++ b/src/libstore/gc.cc
@@ -206,6 +206,11 @@ void LocalStore::findTempRoots(FDs & fds, Roots & tempRoots, bool censor)
/* Read the `temproots' directory for per-process temporary root
files. */
for (auto & i : readDirectory(tempRootsDir)) {
+ if (i.name[0] == '.') {
+ // Ignore hidden files. Some package managers (notably portage) create
+ // those to keep the directory alive.
+ continue;
+ }
Path path = tempRootsDir + "/" + i.name;
pid_t pid = std::stoi(i.name);
@@ -418,7 +423,7 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
try {
auto mapFile = fmt("/proc/%s/maps", ent->d_name);
- auto mapLines = tokenizeString<std::vector<string>>(readFile(mapFile, true), "\n");
+ auto mapLines = tokenizeString<std::vector<string>>(readFile(mapFile), "\n");
for (const auto & line : mapLines) {
auto match = std::smatch{};
if (std::regex_match(line, match, mapRegex))
@@ -426,7 +431,7 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
}
auto envFile = fmt("/proc/%s/environ", ent->d_name);
- auto envString = readFile(envFile, true);
+ auto envString = readFile(envFile);
auto env_end = std::sregex_iterator{};
for (auto i = std::sregex_iterator{envString.begin(), envString.end(), storePathRegex}; i != env_end; ++i)
unchecked[i->str()].emplace(envFile);
@@ -888,7 +893,7 @@ void LocalStore::autoGC(bool sync)
if (statvfs(realStoreDir.c_str(), &st))
throw SysError("getting filesystem info about '%s'", realStoreDir);
- return (uint64_t) st.f_bavail * st.f_bsize;
+ return (uint64_t) st.f_bavail * st.f_frsize;
};
std::shared_future<void> future;
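
Aside: the autoGC() change multiplies f_bavail by f_frsize instead of f_bsize, because POSIX counts f_bavail in fragment-size units; on file systems where the two sizes differ, the old product misreports free space. A standalone sketch of the corrected calculation (not part of the patch):

    #include <sys/statvfs.h>

    #include <cstdint>
    #include <cstdio>

    int main()
    {
        struct statvfs st;
        if (statvfs("/", &st) != 0) { perror("statvfs"); return 1; }

        // f_bavail counts fragments (f_frsize bytes each) available
        // to unprivileged users.
        uint64_t avail = (uint64_t) st.f_bavail * st.f_frsize;
        printf("available on /: %llu bytes\n", (unsigned long long) avail);
        return 0;
    }
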
diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index 7e97f3c22..a0a2d850e 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -31,6 +31,7 @@ Settings::Settings()
, nixLogDir(canonPath(getEnv("NIX_LOG_DIR").value_or(NIX_LOG_DIR)))
, nixStateDir(canonPath(getEnv("NIX_STATE_DIR").value_or(NIX_STATE_DIR)))
, nixConfDir(canonPath(getEnv("NIX_CONF_DIR").value_or(NIX_CONF_DIR)))
+ , nixUserConfFiles(getUserConfigFiles())
, nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR").value_or(NIX_LIBEXEC_DIR)))
, nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR)))
, nixManDir(canonPath(NIX_MAN_DIR))
@@ -77,11 +78,27 @@ void loadConfFile()
~/.nix/nix.conf or the command line. */
globalConfig.resetOverriden();
+ auto files = settings.nixUserConfFiles;
+ for (auto file = files.rbegin(); file != files.rend(); file++) {
+ globalConfig.applyConfigFile(*file);
+ }
+}
+
+std::vector<Path> getUserConfigFiles()
+{
+ // Use the paths specified in NIX_USER_CONF_FILES if it has been defined
+ auto nixConfFiles = getEnv("NIX_USER_CONF_FILES");
+ if (nixConfFiles.has_value()) {
+ return tokenizeString<std::vector<string>>(nixConfFiles.value(), ":");
+ }
+
+ // Use the paths specified by the XDG spec
+ std::vector<Path> files;
auto dirs = getConfigDirs();
- // Iterate over them in reverse so that the ones appearing first in the path take priority
- for (auto dir = dirs.rbegin(); dir != dirs.rend(); dir++) {
- globalConfig.applyConfigFile(*dir + "/nix/nix.conf");
+ for (auto & dir : dirs) {
+ files.insert(files.end(), dir + "/nix/nix.conf");
}
+ return files;
}
unsigned int Settings::getDefaultCores()
@@ -150,21 +167,24 @@ template<> void BaseSetting<SandboxMode>::toJSON(JSONPlaceholder & out)
template<> void BaseSetting<SandboxMode>::convertToArg(Args & args, const std::string & category)
{
- args.mkFlag()
- .longName(name)
- .description("Enable sandboxing.")
- .handler([=](std::vector<std::string> ss) { override(smEnabled); })
- .category(category);
- args.mkFlag()
- .longName("no-" + name)
- .description("Disable sandboxing.")
- .handler([=](std::vector<std::string> ss) { override(smDisabled); })
- .category(category);
- args.mkFlag()
- .longName("relaxed-" + name)
- .description("Enable sandboxing, but allow builds to disable it.")
- .handler([=](std::vector<std::string> ss) { override(smRelaxed); })
- .category(category);
+ args.addFlag({
+ .longName = name,
+ .description = "Enable sandboxing.",
+ .category = category,
+ .handler = {[=]() { override(smEnabled); }}
+ });
+ args.addFlag({
+ .longName = "no-" + name,
+ .description = "Disable sandboxing.",
+ .category = category,
+ .handler = {[=]() { override(smDisabled); }}
+ });
+ args.addFlag({
+ .longName = "relaxed-" + name,
+ .description = "Enable sandboxing, but allow builds to disable it.",
+ .category = category,
+ .handler = {[=]() { override(smRelaxed); }}
+ });
}
void MaxBuildJobsSetting::set(const std::string & str)
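
A standalone sketch of the lookup order implemented above, using only standard C++ (the directory names are made up): when NIX_USER_CONF_FILES is set, its colon-separated entries are used verbatim; otherwise one nix/nix.conf per XDG config directory is used. The files are then applied in reverse so that entries earlier in the list take priority.

    #include <cstdlib>
    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    // Build the list of user config files, mirroring getUserConfigFiles above.
    static std::vector<std::string> userConfFiles(const std::vector<std::string> & xdgDirs)
    {
        std::vector<std::string> files;
        if (const char * env = std::getenv("NIX_USER_CONF_FILES")) {
            std::stringstream ss(env);
            for (std::string item; std::getline(ss, item, ':');)
                if (!item.empty()) files.push_back(item);
        } else {
            for (auto & dir : xdgDirs)
                files.push_back(dir + "/nix/nix.conf");
        }
        return files;
    }

    int main()
    {
        // Example XDG config directories (illustrative only).
        auto files = userConfFiles({"/home/alice/.config", "/etc/xdg"});
        // Apply in reverse: later files first, so the first file wins on conflict.
        for (auto it = files.rbegin(); it != files.rend(); ++it)
            std::cout << "apply " << *it << "\n";
    }
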
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 3aa3653f3..da95fd3ae 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -53,9 +53,12 @@ public:
/* The directory where state is stored. */
Path nixStateDir;
- /* The directory where configuration files are stored. */
+ /* The directory where system configuration files are stored. */
Path nixConfDir;
+ /* A list of user configuration files to load. */
+ std::vector<Path> nixUserConfFiles;
+
/* The directory where internal helper programs are stored. */
Path nixLibexecDir;
@@ -351,12 +354,21 @@ public:
Setting<Paths> pluginFiles{this, {}, "plugin-files",
"Plugins to dynamically load at nix initialization time."};
+ Setting<std::string> githubAccessToken{this, "", "github-access-token",
+ "GitHub access token to get access to GitHub data through the GitHub API for github:<..> flakes."};
+
Setting<Strings> experimentalFeatures{this, {}, "experimental-features",
"Experimental Nix features to enable."};
bool isExperimentalFeatureEnabled(const std::string & name);
void requireExperimentalFeature(const std::string & name);
+
+ Setting<bool> allowDirty{this, true, "allow-dirty",
+ "Whether to allow dirty Git/Mercurial trees."};
+
+ Setting<bool> warnDirty{this, true, "warn-dirty",
+ "Whether to warn about dirty Git/Mercurial trees."};
};
@@ -369,6 +381,9 @@ void initPlugins();
void loadConfFile();
+// Used by the Settings constructor
+std::vector<Path> getUserConfigFiles();
+
extern const string nixVersion;
}
diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index 011794c62..451a64785 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -1,5 +1,5 @@
#include "binary-cache-store.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "globals.hh"
#include "nar-info-disk-cache.hh"
@@ -85,14 +85,14 @@ protected:
checkEnabled();
try {
- DownloadRequest request(cacheUri + "/" + path);
+ FileTransferRequest request(cacheUri + "/" + path);
request.head = true;
- getDownloader()->download(request);
+ getFileTransfer()->download(request);
return true;
- } catch (DownloadError & e) {
+ } catch (FileTransferError & e) {
/* S3 buckets return 403 if a file doesn't exist and the
bucket is unlistable, so treat 403 as 404. */
- if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden)
+ if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
return false;
maybeDisable();
throw;
@@ -103,19 +103,19 @@ protected:
const std::string & data,
const std::string & mimeType) override
{
- auto req = DownloadRequest(cacheUri + "/" + path);
+ auto req = FileTransferRequest(cacheUri + "/" + path);
req.data = std::make_shared<string>(data); // FIXME: inefficient
req.mimeType = mimeType;
try {
- getDownloader()->download(req);
- } catch (DownloadError & e) {
+ getFileTransfer()->upload(req);
+ } catch (FileTransferError & e) {
throw UploadToHTTP("while uploading to HTTP binary cache at '%s': %s", cacheUri, e.msg());
}
}
- DownloadRequest makeRequest(const std::string & path)
+ FileTransferRequest makeRequest(const std::string & path)
{
- DownloadRequest request(cacheUri + "/" + path);
+ FileTransferRequest request(cacheUri + "/" + path);
return request;
}
@@ -124,9 +124,9 @@ protected:
checkEnabled();
auto request(makeRequest(path));
try {
- getDownloader()->download(std::move(request), sink);
- } catch (DownloadError & e) {
- if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden)
+ getFileTransfer()->download(std::move(request), sink);
+ } catch (FileTransferError & e) {
+ if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache '%s'", path, getUri());
maybeDisable();
throw;
@@ -142,12 +142,12 @@ protected:
auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));
- getDownloader()->enqueueDownload(request,
- {[callbackPtr, this](std::future<DownloadResult> result) {
+ getFileTransfer()->enqueueFileTransfer(request,
+ {[callbackPtr, this](std::future<FileTransferResult> result) {
try {
(*callbackPtr)(result.get().data);
- } catch (DownloadError & e) {
- if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden)
+ } catch (FileTransferError & e) {
+ if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
return (*callbackPtr)(std::shared_ptr<std::string>());
maybeDisable();
callbackPtr->rethrow();
@@ -174,4 +174,3 @@ static RegisterStoreImplementation regStore([](
});
}
-
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
index 3175eb69b..427dd48ce 100644
--- a/src/libstore/s3-binary-cache-store.cc
+++ b/src/libstore/s3-binary-cache-store.cc
@@ -6,7 +6,7 @@
#include "nar-info-disk-cache.hh"
#include "globals.hh"
#include "compression.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "istringstream_nocopy.hh"
#include <aws/core/Aws.h>
@@ -136,7 +136,7 @@ ref<Aws::Client::ClientConfiguration> S3Helper::makeConfig(const string & region
return res;
}
-S3Helper::DownloadResult S3Helper::getObject(
+S3Helper::FileTransferResult S3Helper::getObject(
const std::string & bucketName, const std::string & key)
{
debug("fetching 's3://%s/%s'...", bucketName, key);
@@ -150,7 +150,7 @@ S3Helper::DownloadResult S3Helper::getObject(
return Aws::New<std::stringstream>("STRINGSTREAM");
});
- DownloadResult res;
+ FileTransferResult res;
auto now1 = std::chrono::steady_clock::now();
diff --git a/src/libstore/s3.hh b/src/libstore/s3.hh
index ef5f23d0f..2042bffcf 100644
--- a/src/libstore/s3.hh
+++ b/src/libstore/s3.hh
@@ -18,13 +18,13 @@ struct S3Helper
ref<Aws::Client::ClientConfiguration> makeConfig(const std::string & region, const std::string & scheme, const std::string & endpoint);
- struct DownloadResult
+ struct FileTransferResult
{
std::shared_ptr<std::string> data;
unsigned int durationMs;
};
- DownloadResult getObject(
+ FileTransferResult getObject(
const std::string & bucketName, const std::string & key);
};
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index d1281d130..ee65009f8 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -6,6 +6,7 @@
#include "thread-pool.hh"
#include "json.hh"
#include "derivations.hh"
+#include "url.hh"
#include <future>
@@ -40,7 +41,7 @@ Path Store::followLinksToStore(std::string_view _path) const
path = absPath(target, dirOf(path));
}
if (!isInStore(path))
- throw Error("path '%1%' is not in the Nix store", path);
+ throw NotInStore("path '%1%' is not in the Nix store", path);
return path;
}
@@ -870,27 +871,7 @@ std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri_
Store::Params params;
auto q = uri.find('?');
if (q != std::string::npos) {
- for (auto s : tokenizeString<Strings>(uri.substr(q + 1), "&")) {
- auto e = s.find('=');
- if (e != std::string::npos) {
- auto value = s.substr(e + 1);
- std::string decoded;
- for (size_t i = 0; i < value.size(); ) {
- if (value[i] == '%') {
- if (i + 2 >= value.size())
- throw Error("invalid URI parameter '%s'", value);
- try {
- decoded += std::stoul(std::string(value, i + 1, 2), 0, 16);
- i += 3;
- } catch (...) {
- throw Error("invalid URI parameter '%s'", value);
- }
- } else
- decoded += value[i++];
- }
- params[s.substr(0, e)] = decoded;
- }
- }
+ params = decodeQuery(uri.substr(q + 1));
uri = uri_.substr(0, q);
}
return {uri, params};
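
The inline percent-decoding loop removed above is now provided by decodeQuery, added in src/libutil/url.cc later in this patch. A small sketch of the behaviour splitUriAndParams relies on; the query string below is made up:

    #include "url.hh"
    #include <iostream>

    int main()
    {
        // decodeQuery splits on '&' and '=' and percent-decodes each value.
        auto params = nix::decodeQuery("trusted=1&remote-program=nix%20daemon");
        std::cout << params.at("trusted") << "\n";         // prints "1"
        std::cout << params.at("remote-program") << "\n";  // prints "nix daemon"
        return 0;
    }
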
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 0fa59be6a..81014763d 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -28,6 +28,7 @@ MakeError(InvalidPath, Error);
MakeError(Unsupported, Error);
MakeError(SubstituteGone, Error);
MakeError(SubstituterDisabled, Error);
+MakeError(NotInStore, Error);
struct BasicDerivation;
diff --git a/src/libutil/ansicolor.hh b/src/libutil/ansicolor.hh
index 390bd4d17..8ae07b092 100644
--- a/src/libutil/ansicolor.hh
+++ b/src/libutil/ansicolor.hh
@@ -1,13 +1,15 @@
-#pragma once
+#pragma once
+
+namespace nix {
+
+/* Some ANSI escape sequences. */
+#define ANSI_NORMAL "\e[0m"
+#define ANSI_BOLD "\e[1m"
+#define ANSI_FAINT "\e[2m"
+#define ANSI_ITALIC "\e[3m"
+#define ANSI_RED "\e[31;1m"
+#define ANSI_GREEN "\e[32;1m"
+#define ANSI_YELLOW "\e[33;1m"
+#define ANSI_BLUE "\e[34;1m"
-namespace nix
-{
- /* Some ANSI escape sequences. */
- #define ANSI_NORMAL "\e[0m"
- #define ANSI_BOLD "\e[1m"
- #define ANSI_FAINT "\e[2m"
- #define ANSI_RED "\e[31;1m"
- #define ANSI_GREEN "\e[32;1m"
- #define ANSI_YELLOW "\e[33;1m"
- #define ANSI_BLUE "\e[34;1m"
}
diff --git a/src/libutil/args.cc b/src/libutil/args.cc
index 4e705dd64..423b99c96 100644
--- a/src/libutil/args.cc
+++ b/src/libutil/args.cc
@@ -3,16 +3,14 @@
namespace nix {
-Args::FlagMaker Args::mkFlag()
-{
- return FlagMaker(*this);
-}
-
-Args::FlagMaker::~FlagMaker()
+void Args::addFlag(Flag && flag_)
{
+ auto flag = std::make_shared<Flag>(std::move(flag_));
+ if (flag->handler.arity != ArityAny)
+ assert(flag->handler.arity == flag->labels.size());
assert(flag->longName != "");
- args.longFlags[flag->longName] = flag;
- if (flag->shortName) args.shortFlags[flag->shortName] = flag;
+ longFlags[flag->longName] = flag;
+ if (flag->shortName) shortFlags[flag->shortName] = flag;
}
void Args::parseCmdline(const Strings & _cmdline)
@@ -61,7 +59,7 @@ void Args::parseCmdline(const Strings & _cmdline)
void Args::printHelp(const string & programName, std::ostream & out)
{
- std::cout << "Usage: " << programName << " <FLAGS>...";
+ std::cout << fmt(ANSI_BOLD "Usage:" ANSI_NORMAL " %s " ANSI_ITALIC "FLAGS..." ANSI_NORMAL, programName);
for (auto & exp : expectedArgs) {
std::cout << renderLabels({exp.label});
// FIXME: handle arity > 1
@@ -72,11 +70,11 @@ void Args::printHelp(const string & programName, std::ostream & out)
auto s = description();
if (s != "")
- std::cout << "\nSummary: " << s << ".\n";
+ std::cout << "\n" ANSI_BOLD "Summary:" ANSI_NORMAL " " << s << ".\n";
if (longFlags.size()) {
std::cout << "\n";
- std::cout << "Flags:\n";
+ std::cout << ANSI_BOLD "Flags:" ANSI_NORMAL "\n";
printFlags(out);
}
}
@@ -101,16 +99,21 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
auto process = [&](const std::string & name, const Flag & flag) -> bool {
++pos;
std::vector<std::string> args;
- for (size_t n = 0 ; n < flag.arity; ++n) {
+ for (size_t n = 0 ; n < flag.handler.arity; ++n) {
if (pos == end) {
-                if (flag.arity == ArityAny) break;
-                throw UsageError("flag '%1%' requires %2% argument(s)",
-                    name,
-                    flag.arity);
+                if (flag.handler.arity == ArityAny) break;
+                throw UsageError("flag '%s' requires %d argument(s)", name, flag.handler.arity);
}
args.push_back(*pos++);
}
- flag.handler(std::move(args));
+ flag.handler.fun(std::move(args));
return true;
};
@@ -158,17 +161,18 @@ bool Args::processArgs(const Strings & args, bool finish)
return res;
}
-Args::FlagMaker & Args::FlagMaker::mkHashTypeFlag(HashType * ht)
+Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht)
{
- arity(1);
- label("type");
- description("hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')");
- handler([ht](std::string s) {
- *ht = parseHashType(s);
- if (*ht == htUnknown)
- throw UsageError("unknown hash type '%1%'", s);
- });
- return *this;
+ return Flag {
+ .longName = std::move(longName),
+ .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')",
+ .labels = {"hash-algo"},
+ .handler = {[ht](std::string s) {
+ *ht = parseHashType(s);
+ if (*ht == htUnknown)
+ throw UsageError("unknown hash type '%1%'", s);
+ }}
+ };
}
Strings argvToStrings(int argc, char * * argv)
@@ -184,7 +188,7 @@ std::string renderLabels(const Strings & labels)
std::string res;
for (auto label : labels) {
for (auto & c : label) c = std::toupper(c);
- res += " <" + label + ">";
+ res += " " ANSI_ITALIC + label + ANSI_NORMAL;
}
return res;
}
@@ -193,10 +197,10 @@ void printTable(std::ostream & out, const Table2 & table)
{
size_t max = 0;
for (auto & row : table)
- max = std::max(max, row.first.size());
+ max = std::max(max, filterANSIEscapes(row.first, true).size());
for (auto & row : table) {
out << " " << row.first
- << std::string(max - row.first.size() + 2, ' ')
+ << std::string(max - filterANSIEscapes(row.first, true).size() + 2, ' ')
<< row.second << "\n";
}
}
@@ -207,8 +211,7 @@ void Command::printHelp(const string & programName, std::ostream & out)
auto exs = examples();
if (!exs.empty()) {
- out << "\n";
- out << "Examples:\n";
+ out << "\n" ANSI_BOLD "Examples:" ANSI_NORMAL "\n";
for (auto & ex : exs)
out << "\n"
<< " " << ex.description << "\n" // FIXME: wrap
@@ -224,49 +227,55 @@ MultiCommand::MultiCommand(const Commands & commands)
auto i = commands.find(ss[0]);
if (i == commands.end())
throw UsageError("'%s' is not a recognised command", ss[0]);
- command = i->second();
- command->_name = ss[0];
+ command = {ss[0], i->second()};
}});
+
+ categories[Command::catDefault] = "Available commands";
}
void MultiCommand::printHelp(const string & programName, std::ostream & out)
{
if (command) {
- command->printHelp(programName + " " + command->name(), out);
+ command->second->printHelp(programName + " " + command->first, out);
return;
}
- out << "Usage: " << programName << " <COMMAND> <FLAGS>... <ARGS>...\n";
+ out << fmt(ANSI_BOLD "Usage:" ANSI_NORMAL " %s " ANSI_ITALIC "COMMAND FLAGS... ARGS..." ANSI_NORMAL "\n", programName);
- out << "\n";
- out << "Common flags:\n";
+ out << "\n" ANSI_BOLD "Common flags:" ANSI_NORMAL "\n";
printFlags(out);
- out << "\n";
- out << "Available commands:\n";
+ std::map<Command::Category, std::map<std::string, ref<Command>>> commandsByCategory;
- Table2 table;
- for (auto & i : commands) {
- auto command = i.second();
- command->_name = i.first;
- auto descr = command->description();
- if (!descr.empty())
- table.push_back(std::make_pair(command->name(), descr));
+ for (auto & [name, commandFun] : commands) {
+ auto command = commandFun();
+ commandsByCategory[command->category()].insert_or_assign(name, command);
+ }
+
+ for (auto & [category, commands] : commandsByCategory) {
+ out << fmt("\n" ANSI_BOLD "%s:" ANSI_NORMAL "\n", categories[category]);
+
+ Table2 table;
+ for (auto & [name, command] : commands) {
+ auto descr = command->description();
+ if (!descr.empty())
+ table.push_back(std::make_pair(name, descr));
+ }
+ printTable(out, table);
}
- printTable(out, table);
}
bool MultiCommand::processFlag(Strings::iterator & pos, Strings::iterator end)
{
if (Args::processFlag(pos, end)) return true;
- if (command && command->processFlag(pos, end)) return true;
+ if (command && command->second->processFlag(pos, end)) return true;
return false;
}
bool MultiCommand::processArgs(const Strings & args, bool finish)
{
if (command)
- return command->processArgs(args, finish);
+ return command->second->processArgs(args, finish);
else
return Args::processArgs(args, finish);
}
diff --git a/src/libutil/args.hh b/src/libutil/args.hh
index 967efbe1c..1932e6a8a 100644
--- a/src/libutil/args.hh
+++ b/src/libutil/args.hh
@@ -32,13 +32,59 @@ protected:
struct Flag
{
typedef std::shared_ptr<Flag> ptr;
+
+ struct Handler
+ {
+ std::function<void(std::vector<std::string>)> fun;
+ size_t arity;
+
+ Handler() {}
+
+ Handler(std::function<void(std::vector<std::string>)> && fun)
+ : fun(std::move(fun))
+ , arity(ArityAny)
+ { }
+
+ Handler(std::function<void()> && handler)
+ : fun([handler{std::move(handler)}](std::vector<std::string>) { handler(); })
+ , arity(0)
+ { }
+
+ Handler(std::function<void(std::string)> && handler)
+ : fun([handler{std::move(handler)}](std::vector<std::string> ss) {
+ handler(std::move(ss[0]));
+ })
+ , arity(1)
+ { }
+
+ Handler(std::function<void(std::string, std::string)> && handler)
+ : fun([handler{std::move(handler)}](std::vector<std::string> ss) {
+ handler(std::move(ss[0]), std::move(ss[1]));
+ })
+ , arity(2)
+ { }
+
+ template<class T>
+ Handler(T * dest)
+ : fun([=](std::vector<std::string> ss) { *dest = ss[0]; })
+ , arity(1)
+ { }
+
+ template<class T>
+ Handler(T * dest, const T & val)
+ : fun([=](std::vector<std::string> ss) { *dest = val; })
+ , arity(0)
+ { }
+ };
+
std::string longName;
char shortName = 0;
std::string description;
- Strings labels;
- size_t arity = 0;
- std::function<void(std::vector<std::string>)> handler;
std::string category;
+ Strings labels;
+ Handler handler;
+
+ static Flag mkHashTypeFlag(std::string && longName, HashType * ht);
};
std::map<std::string, Flag::ptr> longFlags;
@@ -65,49 +111,7 @@ protected:
public:
- class FlagMaker
- {
- Args & args;
- Flag::ptr flag;
- friend class Args;
- FlagMaker(Args & args) : args(args), flag(std::make_shared<Flag>()) { }
- public:
- ~FlagMaker();
- FlagMaker & longName(const std::string & s) { flag->longName = s; return *this; }
- FlagMaker & shortName(char s) { flag->shortName = s; return *this; }
- FlagMaker & description(const std::string & s) { flag->description = s; return *this; }
- FlagMaker & label(const std::string & l) { flag->arity = 1; flag->labels = {l}; return *this; }
- FlagMaker & labels(const Strings & ls) { flag->arity = ls.size(); flag->labels = ls; return *this; }
- FlagMaker & arity(size_t arity) { flag->arity = arity; return *this; }
- FlagMaker & handler(std::function<void(std::vector<std::string>)> handler) { flag->handler = handler; return *this; }
- FlagMaker & handler(std::function<void()> handler) { flag->handler = [handler](std::vector<std::string>) { handler(); }; return *this; }
- FlagMaker & handler(std::function<void(std::string)> handler) {
- flag->arity = 1;
- flag->handler = [handler](std::vector<std::string> ss) { handler(std::move(ss[0])); };
- return *this;
- }
- FlagMaker & category(const std::string & s) { flag->category = s; return *this; }
-
- template<class T>
- FlagMaker & dest(T * dest)
- {
- flag->arity = 1;
- flag->handler = [=](std::vector<std::string> ss) { *dest = ss[0]; };
- return *this;
- }
-
- template<class T>
- FlagMaker & set(T * dest, const T & val)
- {
- flag->arity = 0;
- flag->handler = [=](std::vector<std::string> ss) { *dest = val; };
- return *this;
- }
-
- FlagMaker & mkHashTypeFlag(HashType * ht);
- };
-
- FlagMaker mkFlag();
+ void addFlag(Flag && flag);
/* Helper functions for constructing flags / positional
arguments. */
@@ -116,13 +120,13 @@ public:
const std::string & label, const std::string & description,
std::function<void(std::string)> fun)
{
- mkFlag()
- .shortName(shortName)
- .longName(longName)
- .labels({label})
- .description(description)
- .arity(1)
- .handler([=](std::vector<std::string> ss) { fun(ss[0]); });
+ addFlag({
+ .longName = longName,
+ .shortName = shortName,
+ .description = description,
+ .labels = {label},
+ .handler = {[=](std::string s) { fun(s); }}
+ });
}
void mkFlag(char shortName, const std::string & name,
@@ -135,11 +139,12 @@ public:
void mkFlag(char shortName, const std::string & longName, const std::string & description,
T * dest, const T & value)
{
- mkFlag()
- .shortName(shortName)
- .longName(longName)
- .description(description)
- .handler([=](std::vector<std::string> ss) { *dest = value; });
+ addFlag({
+ .longName = longName,
+ .shortName = shortName,
+ .description = description,
+ .handler = {[=]() { *dest = value; }}
+ });
}
template<class I>
@@ -155,18 +160,18 @@ public:
void mkFlag(char shortName, const std::string & longName,
const std::string & description, std::function<void(I)> fun)
{
- mkFlag()
- .shortName(shortName)
- .longName(longName)
- .labels({"N"})
- .description(description)
- .arity(1)
- .handler([=](std::vector<std::string> ss) {
+ addFlag({
+ .longName = longName,
+ .shortName = shortName,
+ .description = description,
+ .labels = {"N"},
+ .handler = {[=](std::string s) {
I n;
- if (!string2Int(ss[0], n))
+ if (!string2Int(s, n))
throw UsageError("flag '--%s' requires a integer argument", longName);
fun(n);
- });
+ }}
+ });
}
/* Expect a string argument. */
@@ -192,17 +197,10 @@ public:
run() method. */
struct Command : virtual Args
{
-private:
- std::string _name;
-
friend class MultiCommand;
-public:
-
virtual ~Command() { }
- std::string name() { return _name; }
-
virtual void prepare() { };
virtual void run() = 0;
@@ -216,6 +214,12 @@ public:
virtual Examples examples() { return Examples(); }
+ typedef int Category;
+
+ static constexpr Category catDefault = 0;
+
+ virtual Category category() { return catDefault; }
+
void printHelp(const string & programName, std::ostream & out) override;
};
@@ -228,7 +232,10 @@ class MultiCommand : virtual Args
public:
Commands commands;
- std::shared_ptr<Command> command;
+ std::map<Command::Category, std::string> categories;
+
+ // Selected command, if any.
+ std::optional<std::pair<std::string, ref<Command>>> command;
MultiCommand(const Commands & commands);
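
A sketch of how the new Flag/Handler API above is meant to be used, relying only on the declarations in this header; MyArgs and its flags are illustrative. Each Handler constructor deduces the arity from the callable's signature (0, 1 or 2 string arguments) or from the pointer overloads, and addFlag asserts that the arity matches the number of labels. Calling parseCmdline(argvToStrings(argc, argv)) on a MyArgs instance would then dispatch to these handlers.

    #include "args.hh"
    #include <string>

    // Illustrative command-line definition (not part of the patch). The
    // designated initializers follow the field order of struct Flag.
    struct MyArgs : nix::Args
    {
        bool verbose = false;
        std::string output;

        MyArgs()
        {
            addFlag({
                .longName = "verbose",
                .shortName = 'v',
                .description = "print more output",
                .handler = {[&]() { verbose = true; }}   // Handler(std::function<void()>), arity 0
            });
            addFlag({
                .longName = "out",
                .description = "write results to FILE",
                .labels = {"FILE"},
                .handler = {&output}                     // Handler(T *), arity 1: stores the argument
            });
        }
    };
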
diff --git a/src/libutil/config.cc b/src/libutil/config.cc
index 7551d97d1..f03e444ec 100644
--- a/src/libutil/config.cc
+++ b/src/libutil/config.cc
@@ -177,12 +177,13 @@ void BaseSetting<T>::toJSON(JSONPlaceholder & out)
template<typename T>
void BaseSetting<T>::convertToArg(Args & args, const std::string & category)
{
- args.mkFlag()
- .longName(name)
- .description(description)
- .arity(1)
- .handler([=](std::vector<std::string> ss) { overriden = true; set(ss[0]); })
- .category(category);
+ args.addFlag({
+ .longName = name,
+ .description = description,
+ .category = category,
+ .labels = {"value"},
+ .handler = {[=](std::string s) { overriden = true; set(s); }},
+ });
}
template<> void BaseSetting<std::string>::set(const std::string & str)
@@ -227,16 +228,18 @@ template<> std::string BaseSetting<bool>::to_string() const
template<> void BaseSetting<bool>::convertToArg(Args & args, const std::string & category)
{
- args.mkFlag()
- .longName(name)
- .description(description)
- .handler([=](std::vector<std::string> ss) { override(true); })
- .category(category);
- args.mkFlag()
- .longName("no-" + name)
- .description(description)
- .handler([=](std::vector<std::string> ss) { override(false); })
- .category(category);
+ args.addFlag({
+ .longName = name,
+ .description = description,
+ .category = category,
+ .handler = {[=]() { override(true); }}
+ });
+ args.addFlag({
+ .longName = "no-" + name,
+ .description = description,
+ .category = category,
+ .handler = {[=]() { override(false); }}
+ });
}
template<> void BaseSetting<Strings>::set(const std::string & str)
diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc
index 4244d1221..7c75456ee 100644
--- a/src/libutil/logging.cc
+++ b/src/libutil/logging.cc
@@ -3,7 +3,7 @@
#include <atomic>
#include <nlohmann/json.hpp>
-#include <sstream>
+#include <iostream>
namespace nix {
@@ -25,6 +25,11 @@ void Logger::warn(const std::string & msg)
log(lvlWarn, ANSI_YELLOW "warning:" ANSI_NORMAL " " + msg);
}
+void Logger::writeToStdout(std::string_view s)
+{
+ std::cout << s << "\n";
+}
+
class SimpleLogger : public Logger
{
public:
@@ -243,7 +248,7 @@ bool handleJSONLogMessage(const std::string & msg,
if (action == "start") {
auto type = (ActivityType) json["type"];
- if (trusted || type == actDownload)
+ if (trusted || type == actFileTransfer)
activities.emplace(std::piecewise_construct,
std::forward_as_tuple(json["id"]),
std::forward_as_tuple(*logger, (Verbosity) json["level"], type,
diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh
index 0c4980b83..bb7d356c8 100644
--- a/src/libutil/logging.hh
+++ b/src/libutil/logging.hh
@@ -7,7 +7,7 @@ namespace nix {
typedef enum {
actUnknown = 0,
actCopyPath = 100,
- actDownload = 101,
+ actFileTransfer = 101,
actRealise = 102,
actCopyPaths = 103,
actBuilds = 104,
@@ -76,6 +76,16 @@ public:
virtual void stopActivity(ActivityId act) { };
virtual void result(ActivityId act, ResultType type, const Fields & fields) { };
+
+ virtual void writeToStdout(std::string_view s);
+
+ template<typename... Args>
+ inline void stdout(const std::string & fs, const Args & ... args)
+ {
+ boost::format f(fs);
+ formatHelper(f, args...);
+ writeToStdout(f.str());
+ }
};
ActivityId getCurActivity();
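
A small sketch of the stdout helper declared above: it formats its arguments with boost::format and hands the result to the virtual writeToStdout, so a Logger subclass can override how plain output is emitted. This assumes the global 'logger' pointer that the logging code elsewhere in this patch dereferences; the function and its arguments are illustrative.

    #include "logging.hh"
    #include <string>

    // Illustrative only: route a result line through the logger instead of
    // writing to std::cout directly.
    void printResult(const std::string & storePath, size_t narSize)
    {
        nix::logger->stdout("%s (%d bytes)", storePath, narSize);
    }
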
diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh
index 5780c93a6..a04118512 100644
--- a/src/libutil/serialise.hh
+++ b/src/libutil/serialise.hh
@@ -148,6 +148,9 @@ struct StringSink : Sink
{
ref<std::string> s;
StringSink() : s(make_ref<std::string>()) { };
+ explicit StringSink(const size_t reservedSize) : s(make_ref<std::string>()) {
+ s->reserve(reservedSize);
+ };
StringSink(ref<std::string> s) : s(s) { };
void operator () (const unsigned char * data, size_t len) override;
};
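
A sketch of what the new reserving constructor is for: when the final size is known up front (e.g. from fstat), reserving avoids repeated reallocation while the sink is filled. readAll below is illustrative; drainFD is the libutil helper whose signature is extended later in this patch.

    #include "serialise.hh"
    #include "util.hh"
    #include <string>

    // Drain a file descriptor of known size into a pre-reserved string buffer.
    std::string readAll(int fd, size_t expectedSize)
    {
        nix::StringSink sink(expectedSize);  // reserves expectedSize bytes up front
        nix::drainFD(fd, sink);              // drainFD(int, Sink &, bool block = true)
        return std::move(*sink.s);
    }
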
diff --git a/src/libutil/tests/local.mk b/src/libutil/tests/local.mk
new file mode 100644
index 000000000..a297edb64
--- /dev/null
+++ b/src/libutil/tests/local.mk
@@ -0,0 +1,15 @@
+check: libutil-tests_RUN
+
+programs += libutil-tests
+
+libutil-tests_DIR := $(d)
+
+libutil-tests_INSTALL_DIR :=
+
+libutil-tests_SOURCES := $(wildcard $(d)/*.cc)
+
+libutil-tests_CXXFLAGS += -I src/libutil
+
+libutil-tests_LIBS = libutil
+
+libutil-tests_LDFLAGS := $(GTEST_LIBS)
diff --git a/src/libutil/tests/tests.cc b/src/libutil/tests/tests.cc
new file mode 100644
index 000000000..5abfe2c9c
--- /dev/null
+++ b/src/libutil/tests/tests.cc
@@ -0,0 +1,585 @@
+#include "util.hh"
+#include "types.hh"
+
+#include <gtest/gtest.h>
+
+namespace nix {
+
+ /* ----------------------------------------------------------------------------
+ * absPath
+ * --------------------------------------------------------------------------*/
+
+ TEST(absPath, doesntChangeRoot) {
+ auto p = absPath("/");
+
+ ASSERT_EQ(p, "/");
+ }
+
+ TEST(absPath, turnsEmptyPathIntoCWD) {
+ char cwd[PATH_MAX+1];
+ auto p = absPath("");
+
+ ASSERT_EQ(p, getcwd((char*)&cwd, PATH_MAX));
+ }
+
+ TEST(absPath, usesOptionalBasePathWhenGiven) {
+ char _cwd[PATH_MAX+1];
+ char* cwd = getcwd((char*)&_cwd, PATH_MAX);
+
+ auto p = absPath("", cwd);
+
+ ASSERT_EQ(p, cwd);
+ }
+
+ TEST(absPath, isIdempotent) {
+ char _cwd[PATH_MAX+1];
+ char* cwd = getcwd((char*)&_cwd, PATH_MAX);
+ auto p1 = absPath(cwd);
+ auto p2 = absPath(p1);
+
+ ASSERT_EQ(p1, p2);
+ }
+
+
+ TEST(absPath, pathIsCanonicalised) {
+ auto path = "/some/path/with/trailing/dot/.";
+ auto p1 = absPath(path);
+ auto p2 = absPath(p1);
+
+ ASSERT_EQ(p1, "/some/path/with/trailing/dot");
+ ASSERT_EQ(p1, p2);
+ }
+
+ /* ----------------------------------------------------------------------------
+ * canonPath
+ * --------------------------------------------------------------------------*/
+
+ TEST(canonPath, removesTrailingSlashes) {
+ auto path = "/this/is/a/path//";
+ auto p = canonPath(path);
+
+ ASSERT_EQ(p, "/this/is/a/path");
+ }
+
+ TEST(canonPath, removesDots) {
+ auto path = "/this/./is/a/path/./";
+ auto p = canonPath(path);
+
+ ASSERT_EQ(p, "/this/is/a/path");
+ }
+
+ TEST(canonPath, removesDots2) {
+ auto path = "/this/a/../is/a////path/foo/..";
+ auto p = canonPath(path);
+
+ ASSERT_EQ(p, "/this/is/a/path");
+ }
+
+ TEST(canonPath, requiresAbsolutePath) {
+ ASSERT_ANY_THROW(canonPath("."));
+ ASSERT_ANY_THROW(canonPath(".."));
+ ASSERT_ANY_THROW(canonPath("../"));
+ ASSERT_DEATH({ canonPath(""); }, "path != \"\"");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * dirOf
+ * --------------------------------------------------------------------------*/
+
+ TEST(dirOf, returnsEmptyStringForRoot) {
+ auto p = dirOf("/");
+
+ ASSERT_EQ(p, "/");
+ }
+
+ TEST(dirOf, returnsFirstPathComponent) {
+ auto p1 = dirOf("/dir/");
+ ASSERT_EQ(p1, "/dir");
+ auto p2 = dirOf("/dir");
+ ASSERT_EQ(p2, "/");
+ auto p3 = dirOf("/dir/..");
+ ASSERT_EQ(p3, "/dir");
+ auto p4 = dirOf("/dir/../");
+ ASSERT_EQ(p4, "/dir/..");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * baseNameOf
+ * --------------------------------------------------------------------------*/
+
+ TEST(baseNameOf, emptyPath) {
+ auto p1 = baseNameOf("");
+ ASSERT_EQ(p1, "");
+ }
+
+ TEST(baseNameOf, pathOnRoot) {
+ auto p1 = baseNameOf("/dir");
+ ASSERT_EQ(p1, "dir");
+ }
+
+ TEST(baseNameOf, relativePath) {
+ auto p1 = baseNameOf("dir/foo");
+ ASSERT_EQ(p1, "foo");
+ }
+
+ TEST(baseNameOf, pathWithTrailingSlashRoot) {
+ auto p1 = baseNameOf("/");
+ ASSERT_EQ(p1, "");
+ }
+
+ TEST(baseNameOf, trailingSlash) {
+ auto p1 = baseNameOf("/dir/");
+ ASSERT_EQ(p1, "dir");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * isInDir
+ * --------------------------------------------------------------------------*/
+
+ TEST(isInDir, trivialCase) {
+ auto p1 = isInDir("/foo/bar", "/foo");
+ ASSERT_EQ(p1, true);
+ }
+
+ TEST(isInDir, notInDir) {
+ auto p1 = isInDir("/zes/foo/bar", "/foo");
+ ASSERT_EQ(p1, false);
+ }
+
+ // XXX: hm, bug or feature? :) Looking at the implementation
+ // this might be problematic.
+ TEST(isInDir, emptyDir) {
+ auto p1 = isInDir("/zes/foo/bar", "");
+ ASSERT_EQ(p1, true);
+ }
+
+ /* ----------------------------------------------------------------------------
+ * isDirOrInDir
+ * --------------------------------------------------------------------------*/
+
+ TEST(isDirOrInDir, trueForSameDirectory) {
+ ASSERT_EQ(isDirOrInDir("/nix", "/nix"), true);
+ ASSERT_EQ(isDirOrInDir("/", "/"), true);
+ }
+
+ TEST(isDirOrInDir, trueForEmptyPaths) {
+ ASSERT_EQ(isDirOrInDir("", ""), true);
+ }
+
+ TEST(isDirOrInDir, falseForDisjunctPaths) {
+ ASSERT_EQ(isDirOrInDir("/foo", "/bar"), false);
+ }
+
+ TEST(isDirOrInDir, relativePaths) {
+ ASSERT_EQ(isDirOrInDir("/foo/..", "/foo"), true);
+ }
+
+ // XXX: while it is possible to use "." or ".." in the
+ // first argument this doesn't seem to work in the second.
+ TEST(isDirOrInDir, DISABLED_shouldWork) {
+ ASSERT_EQ(isDirOrInDir("/foo/..", "/foo/."), true);
+
+ }
+
+ /* ----------------------------------------------------------------------------
+ * pathExists
+ * --------------------------------------------------------------------------*/
+
+ TEST(pathExists, rootExists) {
+ ASSERT_TRUE(pathExists("/"));
+ }
+
+ TEST(pathExists, cwdExists) {
+ ASSERT_TRUE(pathExists("."));
+ }
+
+ TEST(pathExists, bogusPathDoesNotExist) {
+ ASSERT_FALSE(pathExists("/home/schnitzel/darmstadt/pommes"));
+ }
+
+ /* ----------------------------------------------------------------------------
+ * concatStringsSep
+ * --------------------------------------------------------------------------*/
+
+ TEST(concatStringsSep, buildCommaSeparatedString) {
+ Strings strings;
+ strings.push_back("this");
+ strings.push_back("is");
+ strings.push_back("great");
+
+ ASSERT_EQ(concatStringsSep(",", strings), "this,is,great");
+ }
+
+ TEST(concatStringsSep, buildStringWithEmptySeparator) {
+ Strings strings;
+ strings.push_back("this");
+ strings.push_back("is");
+ strings.push_back("great");
+
+ ASSERT_EQ(concatStringsSep("", strings), "thisisgreat");
+ }
+
+ TEST(concatStringsSep, buildSingleString) {
+ Strings strings;
+ strings.push_back("this");
+
+ ASSERT_EQ(concatStringsSep(",", strings), "this");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * hasPrefix
+ * --------------------------------------------------------------------------*/
+
+ TEST(hasPrefix, emptyStringHasNoPrefix) {
+ ASSERT_FALSE(hasPrefix("", "foo"));
+ }
+
+ TEST(hasPrefix, emptyStringIsAlwaysPrefix) {
+ ASSERT_TRUE(hasPrefix("foo", ""));
+ ASSERT_TRUE(hasPrefix("jshjkfhsadf", ""));
+ }
+
+ TEST(hasPrefix, trivialCase) {
+ ASSERT_TRUE(hasPrefix("foobar", "foo"));
+ }
+
+ /* ----------------------------------------------------------------------------
+ * hasSuffix
+ * --------------------------------------------------------------------------*/
+
+ TEST(hasSuffix, emptyStringHasNoSuffix) {
+ ASSERT_FALSE(hasSuffix("", "foo"));
+ }
+
+ TEST(hasSuffix, trivialCase) {
+ ASSERT_TRUE(hasSuffix("foo", "foo"));
+ ASSERT_TRUE(hasSuffix("foobar", "bar"));
+ }
+
+ /* ----------------------------------------------------------------------------
+ * base64Encode
+ * --------------------------------------------------------------------------*/
+
+ TEST(base64Encode, emptyString) {
+ ASSERT_EQ(base64Encode(""), "");
+ }
+
+ TEST(base64Encode, encodesAString) {
+ ASSERT_EQ(base64Encode("quod erat demonstrandum"), "cXVvZCBlcmF0IGRlbW9uc3RyYW5kdW0=");
+ }
+
+ TEST(base64Encode, encodeAndDecode) {
+ auto s = "quod erat demonstrandum";
+ auto encoded = base64Encode(s);
+ auto decoded = base64Decode(encoded);
+
+ ASSERT_EQ(decoded, s);
+ }
+
+ /* ----------------------------------------------------------------------------
+ * base64Decode
+ * --------------------------------------------------------------------------*/
+
+ TEST(base64Decode, emptyString) {
+ ASSERT_EQ(base64Decode(""), "");
+ }
+
+ TEST(base64Decode, decodeAString) {
+ ASSERT_EQ(base64Decode("cXVvZCBlcmF0IGRlbW9uc3RyYW5kdW0="), "quod erat demonstrandum");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * toLower
+ * --------------------------------------------------------------------------*/
+
+ TEST(toLower, emptyString) {
+ ASSERT_EQ(toLower(""), "");
+ }
+
+ TEST(toLower, nonLetters) {
+ auto s = "!@(*$#)(@#=\\234_";
+ ASSERT_EQ(toLower(s), s);
+ }
+
+ // std::tolower() doesn't handle unicode characters. In the context of
+ // store paths this isn't relevant but doesn't hurt to record this behavior
+ // here.
+ TEST(toLower, umlauts) {
+ auto s = "ÄÖÜ";
+ ASSERT_EQ(toLower(s), "ÄÖÜ");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * string2Float
+ * --------------------------------------------------------------------------*/
+
+ TEST(string2Float, emptyString) {
+ double n;
+ ASSERT_EQ(string2Float("", n), false);
+ }
+
+ TEST(string2Float, trivialConversions) {
+ double n;
+ ASSERT_EQ(string2Float("1.0", n), true);
+ ASSERT_EQ(n, 1.0);
+
+ ASSERT_EQ(string2Float("0.0", n), true);
+ ASSERT_EQ(n, 0.0);
+
+ ASSERT_EQ(string2Float("-100.25", n), true);
+ ASSERT_EQ(n, (-100.25));
+ }
+
+ /* ----------------------------------------------------------------------------
+ * string2Int
+ * --------------------------------------------------------------------------*/
+
+ TEST(string2Int, emptyString) {
+ double n;
+ ASSERT_EQ(string2Int("", n), false);
+ }
+
+ TEST(string2Int, trivialConversions) {
+ double n;
+ ASSERT_EQ(string2Int("1", n), true);
+ ASSERT_EQ(n, 1);
+
+ ASSERT_EQ(string2Int("0", n), true);
+ ASSERT_EQ(n, 0);
+
+ ASSERT_EQ(string2Int("-100", n), true);
+ ASSERT_EQ(n, (-100));
+ }
+
+ /* ----------------------------------------------------------------------------
+ * statusOk
+ * --------------------------------------------------------------------------*/
+
+ TEST(statusOk, zeroIsOk) {
+ ASSERT_EQ(statusOk(0), true);
+ ASSERT_EQ(statusOk(1), false);
+ }
+
+
+ /* ----------------------------------------------------------------------------
+ * rewriteStrings
+ * --------------------------------------------------------------------------*/
+
+ TEST(rewriteStrings, emptyString) {
+ StringMap rewrites;
+ rewrites["this"] = "that";
+
+ ASSERT_EQ(rewriteStrings("", rewrites), "");
+ }
+
+ TEST(rewriteStrings, emptyRewrites) {
+ StringMap rewrites;
+
+ ASSERT_EQ(rewriteStrings("this and that", rewrites), "this and that");
+ }
+
+ TEST(rewriteStrings, successfulRewrite) {
+ StringMap rewrites;
+ rewrites["this"] = "that";
+
+ ASSERT_EQ(rewriteStrings("this and that", rewrites), "that and that");
+ }
+
+ TEST(rewriteStrings, doesntOccur) {
+ StringMap rewrites;
+ rewrites["foo"] = "bar";
+
+ ASSERT_EQ(rewriteStrings("this and that", rewrites), "this and that");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * replaceStrings
+ * --------------------------------------------------------------------------*/
+
+ TEST(replaceStrings, emptyString) {
+ ASSERT_EQ(replaceStrings("", "this", "that"), "");
+ ASSERT_EQ(replaceStrings("this and that", "", ""), "this and that");
+ }
+
+ TEST(replaceStrings, successfulReplace) {
+ ASSERT_EQ(replaceStrings("this and that", "this", "that"), "that and that");
+ }
+
+ TEST(replaceStrings, doesntOccur) {
+ ASSERT_EQ(replaceStrings("this and that", "foo", "bar"), "this and that");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * trim
+ * --------------------------------------------------------------------------*/
+
+ TEST(trim, emptyString) {
+ ASSERT_EQ(trim(""), "");
+ }
+
+ TEST(trim, removesWhitespace) {
+ ASSERT_EQ(trim("foo"), "foo");
+ ASSERT_EQ(trim(" foo "), "foo");
+ ASSERT_EQ(trim(" foo bar baz"), "foo bar baz");
+ ASSERT_EQ(trim(" \t foo bar baz\n"), "foo bar baz");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * chomp
+ * --------------------------------------------------------------------------*/
+
+ TEST(chomp, emptyString) {
+ ASSERT_EQ(chomp(""), "");
+ }
+
+ TEST(chomp, removesWhitespace) {
+ ASSERT_EQ(chomp("foo"), "foo");
+ ASSERT_EQ(chomp("foo "), "foo");
+ ASSERT_EQ(chomp(" foo "), " foo");
+ ASSERT_EQ(chomp(" foo bar baz "), " foo bar baz");
+ ASSERT_EQ(chomp("\t foo bar baz\n"), "\t foo bar baz");
+ }
+
+ /* ----------------------------------------------------------------------------
+ * quoteStrings
+ * --------------------------------------------------------------------------*/
+
+ TEST(quoteStrings, empty) {
+ Strings s = { };
+ Strings expected = { };
+
+ ASSERT_EQ(quoteStrings(s), expected);
+ }
+
+ TEST(quoteStrings, emptyStrings) {
+ Strings s = { "", "", "" };
+ Strings expected = { "''", "''", "''" };
+ ASSERT_EQ(quoteStrings(s), expected);
+
+ }
+
+ TEST(quoteStrings, trivialQuote) {
+ Strings s = { "foo", "bar", "baz" };
+ Strings expected = { "'foo'", "'bar'", "'baz'" };
+
+ ASSERT_EQ(quoteStrings(s), expected);
+ }
+
+ TEST(quoteStrings, quotedStrings) {
+ Strings s = { "'foo'", "'bar'", "'baz'" };
+ Strings expected = { "''foo''", "''bar''", "''baz''" };
+
+ ASSERT_EQ(quoteStrings(s), expected);
+ }
+
+ /* ----------------------------------------------------------------------------
+ * tokenizeString
+ * --------------------------------------------------------------------------*/
+
+ TEST(tokenizeString, empty) {
+ Strings expected = { };
+
+ ASSERT_EQ(tokenizeString<Strings>(""), expected);
+ }
+
+ TEST(tokenizeString, tokenizeSpacesWithDefaults) {
+ auto s = "foo bar baz";
+ Strings expected = { "foo", "bar", "baz" };
+
+ ASSERT_EQ(tokenizeString<Strings>(s), expected);
+ }
+
+ TEST(tokenizeString, tokenizeTabsWithDefaults) {
+ auto s = "foo\tbar\tbaz";
+ Strings expected = { "foo", "bar", "baz" };
+
+ ASSERT_EQ(tokenizeString<Strings>(s), expected);
+ }
+
+ TEST(tokenizeString, tokenizeTabsSpacesWithDefaults) {
+ auto s = "foo\t bar\t baz";
+ Strings expected = { "foo", "bar", "baz" };
+
+ ASSERT_EQ(tokenizeString<Strings>(s), expected);
+ }
+
+ TEST(tokenizeString, tokenizeTabsSpacesNewlineWithDefaults) {
+ auto s = "foo\t\n bar\t\n baz";
+ Strings expected = { "foo", "bar", "baz" };
+
+ ASSERT_EQ(tokenizeString<Strings>(s), expected);
+ }
+
+ TEST(tokenizeString, tokenizeTabsSpacesNewlineRetWithDefaults) {
+ auto s = "foo\t\n\r bar\t\n\r baz";
+ Strings expected = { "foo", "bar", "baz" };
+
+ ASSERT_EQ(tokenizeString<Strings>(s), expected);
+
+ auto s2 = "foo \t\n\r bar \t\n\r baz";
+ Strings expected2 = { "foo", "bar", "baz" };
+
+ ASSERT_EQ(tokenizeString<Strings>(s2), expected2);
+ }
+
+ TEST(tokenizeString, tokenizeWithCustomSep) {
+ auto s = "foo\n,bar\n,baz\n";
+ Strings expected = { "foo\n", "bar\n", "baz\n" };
+
+ ASSERT_EQ(tokenizeString<Strings>(s, ","), expected);
+ }
+
+ /* ----------------------------------------------------------------------------
+ * get
+ * --------------------------------------------------------------------------*/
+
+ TEST(get, emptyContainer) {
+ StringMap s = { };
+ auto expected = std::nullopt;
+
+ ASSERT_EQ(get(s, "one"), expected);
+ }
+
+ TEST(get, getFromContainer) {
+ StringMap s;
+ s["one"] = "yi";
+ s["two"] = "er";
+ auto expected = "yi";
+
+ ASSERT_EQ(get(s, "one"), expected);
+ }
+
+ /* ----------------------------------------------------------------------------
+ * filterANSIEscapes
+ * --------------------------------------------------------------------------*/
+
+ TEST(filterANSIEscapes, emptyString) {
+ auto s = "";
+ auto expected = "";
+
+ ASSERT_EQ(filterANSIEscapes(s), expected);
+ }
+
+ TEST(filterANSIEscapes, doesntChangePrintableChars) {
+ auto s = "09 2q304ruyhr slk2-19024 kjsadh sar f";
+
+ ASSERT_EQ(filterANSIEscapes(s), s);
+ }
+
+ TEST(filterANSIEscapes, filtersColorCodes) {
+ auto s = "\u001b[30m A \u001b[31m B \u001b[32m C \u001b[33m D \u001b[0m";
+
+ ASSERT_EQ(filterANSIEscapes(s, true, 2), " A" );
+ ASSERT_EQ(filterANSIEscapes(s, true, 3), " A " );
+ ASSERT_EQ(filterANSIEscapes(s, true, 4), " A " );
+ ASSERT_EQ(filterANSIEscapes(s, true, 5), " A B" );
+ ASSERT_EQ(filterANSIEscapes(s, true, 8), " A B C" );
+ }
+
+ TEST(filterANSIEscapes, expandsTabs) {
+ auto s = "foo\tbar\tbaz";
+
+ ASSERT_EQ(filterANSIEscapes(s, true), "foo bar baz" );
+ }
+
+}
diff --git a/src/libutil/types.hh b/src/libutil/types.hh
index f11256f61..a831b9924 100644
--- a/src/libutil/types.hh
+++ b/src/libutil/types.hh
@@ -25,4 +25,12 @@ typedef list<Path> Paths;
typedef set<Path> PathSet;
+/* Helper class to run code at startup. */
+template<typename T>
+struct OnStartup
+{
+ OnStartup(T && t) { t(); }
+};
+
+
}
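
A sketch of how the new OnStartup helper can be used: wrapping a callable in a namespace-scope OnStartup object runs it during static initialisation, before main(). The registry below is illustrative only; OnStartup is repeated verbatim so the sketch is self-contained.

    #include <iostream>
    #include <string>
    #include <vector>

    template<typename T>
    struct OnStartup
    {
        OnStartup(T && t) { t(); }
    };

    // Function-local static avoids initialisation-order problems between
    // translation units.
    static std::vector<std::string> & registry()
    {
        static std::vector<std::string> r;
        return r;
    }

    // Runs during static initialisation of this translation unit.
    static OnStartup registerFoo([]() { registry().push_back("foo"); });

    int main()
    {
        for (auto & name : registry())
            std::cout << "registered: " << name << "\n";
    }
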
diff --git a/src/libutil/url.cc b/src/libutil/url.cc
new file mode 100644
index 000000000..5d5328e5d
--- /dev/null
+++ b/src/libutil/url.cc
@@ -0,0 +1,137 @@
+#include "url.hh"
+#include "util.hh"
+
+namespace nix {
+
+std::regex refRegex(refRegexS, std::regex::ECMAScript);
+std::regex revRegex(revRegexS, std::regex::ECMAScript);
+std::regex flakeIdRegex(flakeIdRegexS, std::regex::ECMAScript);
+
+ParsedURL parseURL(const std::string & url)
+{
+ static std::regex uriRegex(
+ "((" + schemeRegex + "):"
+ + "(?:(?://(" + authorityRegex + ")(" + absPathRegex + "))|(/?" + pathRegex + ")))"
+ + "(?:\\?(" + queryRegex + "))?"
+ + "(?:#(" + queryRegex + "))?",
+ std::regex::ECMAScript);
+
+ std::smatch match;
+
+ if (std::regex_match(url, match, uriRegex)) {
+ auto & base = match[1];
+ std::string scheme = match[2];
+ auto authority = match[3].matched
+ ? std::optional<std::string>(match[3]) : std::nullopt;
+ std::string path = match[4].matched ? match[4] : match[5];
+ auto & query = match[6];
+ auto & fragment = match[7];
+
+ auto isFile = scheme.find("file") != std::string::npos;
+
+ if (authority && *authority != "" && isFile)
+ throw Error("file:// URL '%s' has unexpected authority '%s'",
+ url, *authority);
+
+ if (isFile && path.empty())
+ path = "/";
+
+ return ParsedURL{
+ .url = url,
+ .base = base,
+ .scheme = scheme,
+ .authority = authority,
+ .path = path,
+ .query = decodeQuery(query),
+ .fragment = percentDecode(std::string(fragment))
+ };
+ }
+
+ else
+ throw BadURL("'%s' is not a valid URL", url);
+}
+
+std::string percentDecode(std::string_view in)
+{
+ std::string decoded;
+ for (size_t i = 0; i < in.size(); ) {
+ if (in[i] == '%') {
+ if (i + 2 >= in.size())
+ throw BadURL("invalid URI parameter '%s'", in);
+ try {
+ decoded += std::stoul(std::string(in, i + 1, 2), 0, 16);
+ i += 3;
+ } catch (...) {
+ throw BadURL("invalid URI parameter '%s'", in);
+ }
+ } else
+ decoded += in[i++];
+ }
+ return decoded;
+}
+
+std::map<std::string, std::string> decodeQuery(const std::string & query)
+{
+ std::map<std::string, std::string> result;
+
+ for (auto s : tokenizeString<Strings>(query, "&")) {
+ auto e = s.find('=');
+ if (e != std::string::npos)
+ result.emplace(
+ s.substr(0, e),
+ percentDecode(std::string_view(s).substr(e + 1)));
+ }
+
+ return result;
+}
+
+std::string percentEncode(std::string_view s)
+{
+ std::string res;
+ for (auto & c : s)
+ if ((c >= 'a' && c <= 'z')
+ || (c >= 'A' && c <= 'Z')
+ || (c >= '0' && c <= '9')
+ || strchr("-._~!$&'()*+,;=:@", c))
+ res += c;
+ else
+ res += fmt("%%%02x", (unsigned int) c);
+ return res;
+}
+
+std::string encodeQuery(const std::map<std::string, std::string> & ss)
+{
+ std::string res;
+ bool first = true;
+ for (auto & [name, value] : ss) {
+ if (!first) res += '&';
+ first = false;
+ res += percentEncode(name);
+ res += '=';
+ res += percentEncode(value);
+ }
+ return res;
+}
+
+std::string ParsedURL::to_string() const
+{
+ return
+ scheme
+ + ":"
+ + (authority ? "//" + *authority : "")
+ + path
+ + (query.empty() ? "" : "?" + encodeQuery(query))
+ + (fragment.empty() ? "" : "#" + percentEncode(fragment));
+}
+
+bool ParsedURL::operator ==(const ParsedURL & other) const
+{
+ return
+ scheme == other.scheme
+ && authority == other.authority
+ && path == other.path
+ && query == other.query
+ && fragment == other.fragment;
+}
+
+}
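
A usage sketch for the parser above, assuming only url.hh from this patch; the URL is made up. parseURL splits a URL into the fields of ParsedURL, and to_string reassembles it with the query and fragment re-encoded.

    #include "url.hh"
    #include <cassert>
    #include <iostream>

    int main()
    {
        auto u = nix::parseURL("https://cache.example.org/nix-cache?priority=40#frag");

        assert(u.scheme == "https");
        assert(u.authority && *u.authority == "cache.example.org");
        assert(u.path == "/nix-cache");
        assert(u.query.at("priority") == "40");
        assert(u.fragment == "frag");

        // Round-trip back to a string.
        std::cout << u.to_string() << "\n";
        return 0;
    }
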
diff --git a/src/libutil/url.hh b/src/libutil/url.hh
new file mode 100644
index 000000000..1503023a2
--- /dev/null
+++ b/src/libutil/url.hh
@@ -0,0 +1,62 @@
+#pragma once
+
+#include "types.hh"
+
+#include <regex>
+
+namespace nix {
+
+struct ParsedURL
+{
+ std::string url;
+ std::string base; // URL without query/fragment
+ std::string scheme;
+ std::optional<std::string> authority;
+ std::string path;
+ std::map<std::string, std::string> query;
+ std::string fragment;
+
+ std::string to_string() const;
+
+ bool operator ==(const ParsedURL & other) const;
+};
+
+MakeError(BadURL, Error);
+
+std::string percentDecode(std::string_view in);
+
+std::map<std::string, std::string> decodeQuery(const std::string & query);
+
+ParsedURL parseURL(const std::string & url);
+
+// URI stuff.
+const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])";
+const static std::string schemeRegex = "(?:[a-z+]+)";
+const static std::string ipv6AddressRegex = "(?:\\[[0-9a-fA-F:]+\\])";
+const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])";
+const static std::string subdelimsRegex = "(?:[!$&'\"()*+,;=])";
+const static std::string hostnameRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + ")*)";
+const static std::string hostRegex = "(?:" + ipv6AddressRegex + "|" + hostnameRegex + ")";
+const static std::string userRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|:)*)";
+const static std::string authorityRegex = "(?:" + userRegex + "@)?" + hostRegex + "(?::[0-9]+)?";
+const static std::string pcharRegex = "(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|[:@])";
+const static std::string queryRegex = "(?:" + pcharRegex + "|[/? \"])*";
+const static std::string segmentRegex = "(?:" + pcharRegex + "+)";
+const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)";
+const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)";
+
+// A Git ref (i.e. branch or tag name).
+const static std::string refRegexS = "[a-zA-Z0-9][a-zA-Z0-9_.-]*"; // FIXME: check
+extern std::regex refRegex;
+
+// A Git revision (a SHA-1 commit hash).
+const static std::string revRegexS = "[0-9a-fA-F]{40}";
+extern std::regex revRegex;
+
+// A ref or revision, or a ref followed by a revision.
+const static std::string refAndOrRevRegex = "(?:(" + revRegexS + ")|(?:(" + refRegexS + ")(?:/(" + revRegexS + "))?))";
+
+const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*";
+extern std::regex flakeIdRegex;
+
+}
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index e8f22ab71..8207ff701 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -250,16 +250,13 @@ bool isLink(const Path & path)
}
-DirEntries readDirectory(const Path & path)
+DirEntries readDirectory(DIR *dir, const Path & path)
{
DirEntries entries;
entries.reserve(64);
- AutoCloseDir dir(opendir(path.c_str()));
- if (!dir) throw SysError("opening directory '%1%'", path);
-
struct dirent * dirent;
- while (errno = 0, dirent = readdir(dir.get())) { /* sic */
+ while (errno = 0, dirent = readdir(dir)) { /* sic */
checkInterrupt();
string name = dirent->d_name;
if (name == "." || name == "..") continue;
@@ -276,6 +273,14 @@ DirEntries readDirectory(const Path & path)
return entries;
}
+DirEntries readDirectory(const Path & path)
+{
+ AutoCloseDir dir(opendir(path.c_str()));
+ if (!dir) throw SysError(format("opening directory '%1%'") % path);
+
+ return readDirectory(dir.get(), path);
+}
+
unsigned char getFileType(const Path & path)
{
@@ -293,19 +298,16 @@ string readFile(int fd)
if (fstat(fd, &st) == -1)
throw SysError("statting file");
- std::vector<unsigned char> buf(st.st_size);
- readFull(fd, buf.data(), st.st_size);
-
- return string((char *) buf.data(), st.st_size);
+ return drainFD(fd, true, st.st_size);
}
-string readFile(const Path & path, bool drain)
+string readFile(const Path & path)
{
AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC);
if (!fd)
- throw SysError("opening file '%1%'", path);
- return drain ? drainFD(fd.get()) : readFile(fd.get());
+ throw SysError(format("opening file '%1%'") % path);
+ return readFile(fd.get());
}
@@ -372,12 +374,14 @@ void writeLine(int fd, string s)
}
-static void _deletePath(const Path & path, unsigned long long & bytesFreed)
+static void _deletePath(int parentfd, const Path & path, unsigned long long & bytesFreed)
{
checkInterrupt();
+ string name(baseNameOf(path));
+
struct stat st;
- if (lstat(path.c_str(), &st) == -1) {
+ if (fstatat(parentfd, name.c_str(), &st, AT_SYMLINK_NOFOLLOW) == -1) {
if (errno == ENOENT) return;
throw SysError("getting status of '%1%'", path);
}
@@ -389,20 +393,45 @@ static void _deletePath(const Path & path, unsigned long long & bytesFreed)
/* Make the directory accessible. */
const auto PERM_MASK = S_IRUSR | S_IWUSR | S_IXUSR;
if ((st.st_mode & PERM_MASK) != PERM_MASK) {
- if (chmod(path.c_str(), st.st_mode | PERM_MASK) == -1)
- throw SysError("chmod '%1%'", path);
+ if (fchmodat(parentfd, name.c_str(), st.st_mode | PERM_MASK, 0) == -1)
+ throw SysError(format("chmod '%1%'") % path);
}
- for (auto & i : readDirectory(path))
- _deletePath(path + "/" + i.name, bytesFreed);
+ int fd = openat(parentfd, path.c_str(), O_RDONLY);
+    if (fd == -1)
+ throw SysError(format("opening directory '%1%'") % path);
+ AutoCloseDir dir(fdopendir(fd));
+ if (!dir)
+ throw SysError(format("opening directory '%1%'") % path);
+ for (auto & i : readDirectory(dir.get(), path))
+ _deletePath(dirfd(dir.get()), path + "/" + i.name, bytesFreed);
}
- if (remove(path.c_str()) == -1) {
+ int flags = S_ISDIR(st.st_mode) ? AT_REMOVEDIR : 0;
+ if (unlinkat(parentfd, name.c_str(), flags) == -1) {
if (errno == ENOENT) return;
throw SysError("cannot unlink '%1%'", path);
}
}
+static void _deletePath(const Path & path, unsigned long long & bytesFreed)
+{
+ Path dir = dirOf(path);
+ if (dir == "")
+ dir = "/";
+
+ AutoCloseFD dirfd(open(dir.c_str(), O_RDONLY));
+ if (!dirfd) {
+ // This really shouldn't fail silently, but it's left this way
+ // for backwards compatibility.
+ if (errno == ENOENT) return;
+
+ throw SysError(format("opening directory '%1%'") % path);
+ }
+
+ _deletePath(dirfd.get(), path, bytesFreed);
+}
+
void deletePath(const Path & path)
{
@@ -616,9 +645,9 @@ void writeFull(int fd, const string & s, bool allowInterrupts)
}
-string drainFD(int fd, bool block)
+string drainFD(int fd, bool block, const size_t reserveSize)
{
- StringSink sink;
+ StringSink sink(reserveSize);
drainFD(fd, sink, block);
return std::move(*sink.s);
}
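
A standalone sketch (plain POSIX, not the Nix code above) of the *at-style pattern _deletePath now uses: each entry is stat'ed and unlinked relative to an already-open parent directory descriptor, with AT_REMOVEDIR selected for directories. The directory and entry name below are made up.

    #include <cerrno>
    #include <cstdio>
    #include <fcntl.h>
    #include <sys/stat.h>
    #include <unistd.h>

    // Remove a single entry relative to an open parent directory fd.
    static int removeEntry(int parentFd, const char * name)
    {
        struct stat st;
        if (fstatat(parentFd, name, &st, AT_SYMLINK_NOFOLLOW) == -1)
            return errno == ENOENT ? 0 : -1;
        int flags = S_ISDIR(st.st_mode) ? AT_REMOVEDIR : 0;
        if (unlinkat(parentFd, name, flags) == -1 && errno != ENOENT)
            return -1;
        return 0;
    }

    int main()
    {
        int dirFd = open("/tmp", O_RDONLY);            // example directory
        if (dirFd == -1) { std::perror("open"); return 1; }
        if (removeEntry(dirFd, "example-entry") == -1) // hypothetical entry name
            std::perror("remove");
        close(dirFd);
        return 0;
    }
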
diff --git a/src/libutil/util.hh b/src/libutil/util.hh
index 978e14a30..30b647a6a 100644
--- a/src/libutil/util.hh
+++ b/src/libutil/util.hh
@@ -18,6 +18,7 @@
#include <sstream>
#include <optional>
#include <future>
+#include <iterator>
#ifndef HAVE_STRUCT_DIRENT_D_TYPE
#define DT_UNKNOWN 0
@@ -58,12 +59,12 @@ Path canonPath(const Path & path, bool resolveSymlinks = false);
/* Return the directory part of the given canonical path, i.e.,
everything before the final `/'. If the path is the root or an
- immediate child thereof (e.g., `/foo'), this means an empty string
- is returned. */
+ immediate child thereof (e.g., `/foo'), this means `/'
+ is returned.*/
Path dirOf(const Path & path);
/* Return the base name of the given canonical path, i.e., everything
- following the final `/'. */
+ following the final `/' (trailing slashes are removed). */
std::string_view baseNameOf(std::string_view path);
/* Check whether 'path' is a descendant of 'dir'. */
@@ -103,7 +104,7 @@ unsigned char getFileType(const Path & path);
/* Read the contents of a file into a string. */
string readFile(int fd);
-string readFile(const Path & path, bool drain = false);
+string readFile(const Path & path);
void readFile(const Path & path, Sink & sink);
/* Write a string to a file. */
@@ -162,7 +163,7 @@ MakeError(EndOfFile, Error);
/* Read a file descriptor until EOF occurs. */
-string drainFD(int fd, bool block = true);
+string drainFD(int fd, bool block = true, const size_t reserveSize=0);
void drainFD(int fd, Sink & sink, bool block = true);
@@ -389,17 +390,6 @@ string replaceStrings(const std::string & s,
std::string rewriteStrings(const std::string & s, const StringMap & rewrites);
-/* If a set contains 'from', remove it and insert 'to'. */
-template<typename T>
-void replaceInSet(std::set<T> & set, const T & from, const T & to)
-{
- auto i = set.find(from);
- if (i == set.end()) return;
- set.erase(i);
- set.insert(to);
-}
-
-
/* Convert the exit status of a child as returned by wait() into an
error string. */
string statusToString(int status);
diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc
index 589a061c7..3ccf620c9 100755
--- a/src/nix-channel/nix-channel.cc
+++ b/src/nix-channel/nix-channel.cc
@@ -1,8 +1,9 @@
#include "shared.hh"
#include "globals.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "store-api.hh"
#include "../nix/legacy.hh"
+#include "fetchers.hh"
#include <fcntl.h>
#include <regex>
@@ -86,12 +87,9 @@ static void update(const StringSet & channelNames)
// We want to download the url to a file to see if it's a tarball while also checking if we
// got redirected in the process, so that we can grab the various parts of a nix channel
// definition from a consistent location if the redirect changes mid-download.
- CachedDownloadRequest request(url);
- request.ttl = 0;
- auto dl = getDownloader();
- auto result = dl->downloadCached(store, request);
- auto filename = result.path;
- url = chomp(result.effectiveUri);
+ auto result = fetchers::downloadFile(store, url, std::string(baseNameOf(url)), false);
+ auto filename = store->toRealPath(result.storePath);
+ url = result.effectiveUrl;
// If the URL contains a version number, append it to the name
// attribute (so that "nix-env -q" on the channels profile
@@ -114,11 +112,10 @@ static void update(const StringSet & channelNames)
if (!unpacked) {
// Download the channel tarball.
try {
- filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.xz")).path;
- } catch (DownloadError & e) {
- filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.bz2")).path;
+ filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.xz", "nixexprs.tar.xz", false).storePath);
+ } catch (FileTransferError & e) {
+ filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2", false).storePath);
}
- chomp(filename);
}
// Regardless of where it came from, add the expression representing this channel to accumulated expression
@@ -185,6 +182,8 @@ static int _main(int argc, char ** argv)
} else if (*arg == "--rollback") {
cmd = cRollback;
} else {
+ if (hasPrefix(*arg, "-"))
+ throw UsageError("unsupported argument '%s'", *arg);
args.push_back(std::move(*arg));
}
return true;
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index 76008d00c..0f99f5fe1 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -721,28 +721,39 @@ static void uninstallDerivations(Globals & globals, Strings & selectors,
while (true) {
string lockToken = optimisticLockProfile(profile);
- DrvInfos installedElems = queryInstalled(*globals.state, profile);
- DrvInfos newElems;
-
- for (auto & i : installedElems) {
- DrvName drvName(i.queryName());
- bool found = false;
- for (auto & j : selectors)
- /* !!! the repeated calls to followLinksToStorePath()
- are expensive, should pre-compute them. */
- if ((isPath(j) && globals.state->store->parseStorePath(i.queryOutPath()) == globals.state->store->followLinksToStorePath(j))
- || DrvName(j).matches(drvName))
- {
- printInfo("uninstalling '%s'", i.queryName());
- found = true;
- break;
- }
- if (!found) newElems.push_back(i);
+ DrvInfos workingElems = queryInstalled(*globals.state, profile);
+
+ for (auto & selector : selectors) {
+ DrvInfos::iterator split = workingElems.begin();
+ if (isPath(selector)) {
+ StorePath selectorStorePath = globals.state->store->followLinksToStorePath(selector);
+ split = std::partition(
+ workingElems.begin(), workingElems.end(),
+ [&selectorStorePath, globals](auto &elem) {
+ return selectorStorePath != globals.state->store->parseStorePath(elem.queryOutPath());
+ }
+ );
+ } else {
+ DrvName selectorName(selector);
+ split = std::partition(
+ workingElems.begin(), workingElems.end(),
+ [&selectorName](auto &elem){
+ DrvName elemName(elem.queryName());
+ return !selectorName.matches(elemName);
+ }
+ );
+ }
+ if (split == workingElems.end())
+ warn("selector '%s' matched no installed derivations", selector);
+ for (auto removedElem = split; removedElem != workingElems.end(); removedElem++) {
+ printInfo("uninstalling '%s'", removedElem->queryName());
+ }
+ workingElems.erase(split, workingElems.end());
}
if (globals.dryRun) return;
- if (createUserEnv(*globals.state, newElems,
+ if (createUserEnv(*globals.state, workingElems,
profile, settings.envKeepDerivations, lockToken)) break;
}
}
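The rewritten loop above hinges on a partition-then-erase idiom applied once per selector; stripped of the nix-env specifics it looks like this (generic list and predicate, purely illustrative):

    #include <algorithm>
    #include <list>

    // Move elements that do NOT match to the front, then drop the matching tail.
    template<typename T, typename Pred>
    void removeMatching(std::list<T> & xs, Pred matches)
    {
        auto split = std::partition(xs.begin(), xs.end(),
            [&](const T & x) { return !matches(x); });
        // split == xs.end() means nothing matched; the original code warns
        // about the unmatched selector and prints each element it removes.
        xs.erase(split, xs.end());
    }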
diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc
index d3d05cc50..5bf9fa60a 100644
--- a/src/nix-prefetch-url/nix-prefetch-url.cc
+++ b/src/nix-prefetch-url/nix-prefetch-url.cc
@@ -1,6 +1,6 @@
#include "hash.hh"
#include "shared.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "store-api.hh"
#include "eval.hh"
#include "eval-inline.hh"
@@ -180,9 +180,9 @@ static int _main(int argc, char * * argv)
FdSink sink(fd.get());
- DownloadRequest req(actualUri);
+ FileTransferRequest req(actualUri);
req.decompress = false;
- getDownloader()->download(std::move(req), sink);
+ getFileTransfer()->download(std::move(req), sink);
}
/* Optionally unpack the file. */
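The download→file-transfer rename seen here and in the other callers is mechanical: DownloadRequest → FileTransferRequest, getDownloader() → getFileTransfer(), DownloadError → FileTransferError, downloadSettings → fileTransferSettings. A minimal sketch of the new spelling (the FdSink setup assumes the usual serialise.hh):

    #include "filetransfer.hh"
    #include "serialise.hh"   // assumption: where FdSink lives

    // Stream a URI straight into a file descriptor, as nix-prefetch-url does above.
    static void fetchToFd(const std::string & uri, int fd)
    {
        nix::FdSink sink(fd);
        nix::FileTransferRequest req(uri);       // was: DownloadRequest
        req.decompress = false;
        nix::getFileTransfer()->download(std::move(req), sink);   // was: getDownloader()->download
    }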
diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc
index 139db3657..1d298903b 100644
--- a/src/nix/add-to-store.cc
+++ b/src/nix/add-to-store.cc
@@ -14,12 +14,13 @@ struct CmdAddToStore : MixDryRun, StoreCommand
{
expectArg("path", &path);
- mkFlag()
- .longName("name")
- .shortName('n')
- .description("name component of the store path")
- .labels({"name"})
- .dest(&namePart);
+ addFlag({
+ .longName = "name",
+ .shortName = 'n',
+ .description = "name component of the store path",
+ .labels = {"name"},
+ .handler = {&namePart},
+ });
}
std::string description() override
@@ -33,6 +34,8 @@ struct CmdAddToStore : MixDryRun, StoreCommand
};
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store) override
{
if (!namePart) namePart = baseNameOf(path);
@@ -50,7 +53,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
if (!dryRun)
store->addToStore(info, sink.s);
- std::cout << fmt("%s\n", store->printStorePath(info.path));
+ logger->stdout("%s", store->printStorePath(info.path));
}
};
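This is the first of many conversions from the fluent mkFlag() chain to addFlag() with a designated-initializer Flag; the general shape, using illustrative names only, is:

    // 'someOption' and 'quiet' are assumed members, not code from this patch.
    addFlag({
        .longName = "some-option",
        .shortName = 's',
        .description = "set an example option",
        .labels = {"value"},
        .handler = {&someOption},      // bind the argument to a member...
    });

    addFlag({
        .longName = "quiet",
        .description = "an example boolean flag",
        .handler = {&quiet, true},     // ...or assign a constant when the flag is present
    });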
diff --git a/src/nix/build.cc b/src/nix/build.cc
index 0b0762836..850e09ce8 100644
--- a/src/nix/build.cc
+++ b/src/nix/build.cc
@@ -11,17 +11,19 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixProfile
CmdBuild()
{
- mkFlag()
- .longName("out-link")
- .shortName('o')
- .description("path of the symlink to the build result")
- .labels({"path"})
- .dest(&outLink);
+ addFlag({
+ .longName = "out-link",
+ .shortName = 'o',
+ .description = "path of the symlink to the build result",
+ .labels = {"path"},
+ .handler = {&outLink},
+ });
- mkFlag()
- .longName("no-link")
- .description("do not create a symlink to the build result")
- .set(&outLink, Path(""));
+ addFlag({
+ .longName = "no-link",
+ .description = "do not create a symlink to the build result",
+ .handler = {&outLink, Path("")},
+ });
}
std::string description() override
diff --git a/src/nix/cat.cc b/src/nix/cat.cc
index 4e14f50ed..c82819af8 100644
--- a/src/nix/cat.cc
+++ b/src/nix/cat.cc
@@ -30,9 +30,11 @@ struct CmdCatStore : StoreCommand, MixCat
std::string description() override
{
- return "print the contents of a store file on stdout";
+ return "print the contents of a file in the Nix store on stdout";
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store) override
{
cat(store->getFSAccessor());
@@ -51,9 +53,11 @@ struct CmdCatNar : StoreCommand, MixCat
std::string description() override
{
- return "print the contents of a file inside a NAR file";
+ return "print the contents of a file inside a NAR file on stdout";
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store) override
{
cat(makeNarAccessor(make_ref<std::string>(readFile(narPath))));
diff --git a/src/nix/command.cc b/src/nix/command.cc
index 99b24d2a2..71b027719 100644
--- a/src/nix/command.cc
+++ b/src/nix/command.cc
@@ -35,16 +35,18 @@ StorePathsCommand::StorePathsCommand(bool recursive)
: recursive(recursive)
{
if (recursive)
- mkFlag()
- .longName("no-recursive")
- .description("apply operation to specified paths only")
- .set(&this->recursive, false);
+ addFlag({
+ .longName = "no-recursive",
+ .description = "apply operation to specified paths only",
+ .handler = {&this->recursive, false},
+ });
else
- mkFlag()
- .longName("recursive")
- .shortName('r')
- .description("apply operation to closure of the specified paths")
- .set(&this->recursive, true);
+ addFlag({
+ .longName = "recursive",
+ .shortName = 'r',
+ .description = "apply operation to closure of the specified paths",
+ .handler = {&this->recursive, true},
+ });
mkFlag(0, "all", "apply operation to the entire store", &all);
}
@@ -101,11 +103,12 @@ Strings editorFor(const Pos & pos)
MixProfile::MixProfile()
{
- mkFlag()
- .longName("profile")
- .description("profile to update")
- .labels({"path"})
- .dest(&profile);
+ addFlag({
+ .longName = "profile",
+ .description = "profile to update",
+ .labels = {"path"},
+ .handler = {&profile},
+ });
}
void MixProfile::updateProfile(const StorePath & storePath)
@@ -145,28 +148,30 @@ MixDefaultProfile::MixDefaultProfile()
profile = getDefaultProfile();
}
-MixEnvironment::MixEnvironment() : ignoreEnvironment(false) {
- mkFlag()
- .longName("ignore-environment")
- .shortName('i')
- .description("clear the entire environment (except those specified with --keep)")
- .set(&ignoreEnvironment, true);
-
- mkFlag()
- .longName("keep")
- .shortName('k')
- .description("keep specified environment variable")
- .arity(1)
- .labels({"name"})
- .handler([&](std::vector<std::string> ss) { keep.insert(ss.front()); });
-
- mkFlag()
- .longName("unset")
- .shortName('u')
- .description("unset specified environment variable")
- .arity(1)
- .labels({"name"})
- .handler([&](std::vector<std::string> ss) { unset.insert(ss.front()); });
+MixEnvironment::MixEnvironment() : ignoreEnvironment(false)
+{
+ addFlag({
+ .longName = "ignore-environment",
+ .shortName = 'i',
+ .description = "clear the entire environment (except those specified with --keep)",
+ .handler = {&ignoreEnvironment, true},
+ });
+
+ addFlag({
+ .longName = "keep",
+ .shortName = 'k',
+ .description = "keep specified environment variable",
+ .labels = {"name"},
+ .handler = {[&](std::string s) { keep.insert(s); }},
+ });
+
+ addFlag({
+ .longName = "unset",
+ .shortName = 'u',
+ .description = "unset specified environment variable",
+ .labels = {"name"},
+ .handler = {[&](std::string s) { unset.insert(s); }},
+ });
}
void MixEnvironment::setEnviron() {
diff --git a/src/nix/command.hh b/src/nix/command.hh
index 23f5c9898..959d5f19d 100644
--- a/src/nix/command.hh
+++ b/src/nix/command.hh
@@ -10,6 +10,10 @@ namespace nix {
extern std::string programPath;
+static constexpr Command::Category catSecondary = 100;
+static constexpr Command::Category catUtility = 101;
+static constexpr Command::Category catNixInstallation = 102;
+
/* A command that requires a Nix store. */
struct StoreCommand : virtual Command
{
@@ -41,7 +45,7 @@ private:
std::shared_ptr<EvalState> evalState;
- Value * vSourceExpr = 0;
+ RootValue vSourceExpr;
};
enum RealiseMode { Build, NoBuild, DryRun };
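Commands opt into the new grouping by overriding category(); anything left at the default stays under the main listing, while the constants above push a command into the secondary, utility, or installation sections of the help output. A sketch with an invented command:

    // CmdExample is illustrative; only the category() override matters here.
    struct CmdExample : StoreCommand
    {
        std::string description() override { return "an example utility command"; }

        Category category() override { return catUtility; }

        void run(ref<Store> store) override { /* ... */ }
    };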
diff --git a/src/nix/copy.cc b/src/nix/copy.cc
index 85c777d38..c7c38709d 100644
--- a/src/nix/copy.cc
+++ b/src/nix/copy.cc
@@ -19,27 +19,32 @@ struct CmdCopy : StorePathsCommand
CmdCopy()
: StorePathsCommand(true)
{
- mkFlag()
- .longName("from")
- .labels({"store-uri"})
- .description("URI of the source Nix store")
- .dest(&srcUri);
- mkFlag()
- .longName("to")
- .labels({"store-uri"})
- .description("URI of the destination Nix store")
- .dest(&dstUri);
-
- mkFlag()
- .longName("no-check-sigs")
- .description("do not require that paths are signed by trusted keys")
- .set(&checkSigs, NoCheckSigs);
-
- mkFlag()
- .longName("substitute-on-destination")
- .shortName('s')
- .description("whether to try substitutes on the destination store (only supported by SSH)")
- .set(&substitute, Substitute);
+ addFlag({
+ .longName = "from",
+ .description = "URI of the source Nix store",
+ .labels = {"store-uri"},
+ .handler = {&srcUri},
+ });
+
+ addFlag({
+ .longName = "to",
+ .description = "URI of the destination Nix store",
+ .labels = {"store-uri"},
+ .handler = {&dstUri},
+ });
+
+ addFlag({
+ .longName = "no-check-sigs",
+ .description = "do not require that paths are signed by trusted keys",
+ .handler = {&checkSigs, NoCheckSigs},
+ });
+
+ addFlag({
+ .longName = "substitute-on-destination",
+ .shortName = 's',
+ .description = "whether to try substitutes on the destination store (only supported by SSH)",
+ .handler = {&substitute, Substitute},
+ });
}
std::string description() override
@@ -75,6 +80,8 @@ struct CmdCopy : StorePathsCommand
};
}
+ Category category() override { return catSecondary; }
+
ref<Store> createStore() override
{
return srcUri.empty() ? StoreCommand::createStore() : openStore(srcUri);
diff --git a/src/nix/shell.cc b/src/nix/dev-shell.cc
index e0546ef78..d300f6a23 100644
--- a/src/nix/shell.cc
+++ b/src/nix/dev-shell.cc
@@ -13,7 +13,8 @@ using namespace nix;
struct Var
{
- bool exported;
+ bool exported = true;
+ bool associative = false;
std::string value; // quoted string or array
};
@@ -48,11 +49,17 @@ BuildEnvironment readEnvironment(const Path & path)
static std::string quotedStringRegex =
R"re((?:\$?'(?:[^'\\]|\\[abeEfnrtv\\'"?])*'))re";
- static std::string arrayRegex =
- R"re((?:\(( *\[[^\]]+\]="(?:[^"\\]|\\.)*")*\)))re";
+ static std::string indexedArrayRegex =
+ R"re((?:\(( *\[[0-9]+]="(?:[^"\\]|\\.)*")**\)))re";
static std::regex varRegex(
- "^(" + varNameRegex + ")=(" + simpleStringRegex + "|" + quotedStringRegex + "|" + arrayRegex + ")\n");
+ "^(" + varNameRegex + ")=(" + simpleStringRegex + "|" + quotedStringRegex + "|" + indexedArrayRegex + ")\n");
+
+ /* Note: we distinguish between an indexed and associative array
+ using the space before the closing parenthesis. Will
+ undoubtedly regret this some day. */
+ static std::regex assocArrayRegex(
+ "^(" + varNameRegex + ")=" + R"re((?:\(( *\[[^\]]+\]="(?:[^"\\]|\\.)*")* *\)))re" + "\n");
static std::regex functionRegex(
"^" + varNameRegex + " \\(\\) *\n");
@@ -68,7 +75,12 @@ BuildEnvironment readEnvironment(const Path & path)
else if (std::regex_search(pos, file.cend(), match, varRegex)) {
pos = match[0].second;
- res.env.insert({match[1], Var { (bool) exported.count(match[1]), match[2] }});
+ res.env.insert({match[1], Var { .exported = exported.count(match[1]) > 0, .value = match[2] }});
+ }
+
+ else if (std::regex_search(pos, file.cend(), match, assocArrayRegex)) {
+ pos = match[0].second;
+ res.env.insert({match[1], Var { .associative = true, .value = match[2] }});
}
else if (std::regex_search(pos, file.cend(), match, functionRegex)) {
@@ -83,27 +95,26 @@ BuildEnvironment readEnvironment(const Path & path)
return res;
}
+const static std::string getEnvSh =
+ #include "get-env.sh.gen.hh"
+ ;
+
/* Given an existing derivation, return the shell environment as
initialised by stdenv's setup script. We do this by building a
modified derivation with the same dependencies and nearly the same
initial environment variables, that just writes the resulting
environment to a file and exits. */
-StorePath getDerivationEnvironment(ref<Store> store, Derivation drv)
+StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
{
+ auto drv = store->derivationFromPath(drvPath);
+
auto builder = baseNameOf(drv.builder);
if (builder != "bash")
throw Error("'nix dev-shell' only works on derivations that use 'bash' as their builder");
- drv.args = {
- "-c",
- "set -e; "
- "export IN_NIX_SHELL=impure; "
- "export dontAddDisableDepTrack=1; "
- "if [[ -n $stdenv ]]; then "
- " source $stdenv/setup; "
- "fi; "
- "export > $out; "
- "set >> $out "};
+ auto getEnvShPath = store->addTextToStore("get-env.sh", getEnvSh, {});
+
+ drv.args = {store->printStorePath(getEnvShPath)};
/* Remove derivation checks. */
drv.env.erase("allowedReferences");
@@ -111,15 +122,17 @@ StorePath getDerivationEnvironment(ref<Store> store, Derivation drv)
drv.env.erase("disallowedReferences");
drv.env.erase("disallowedRequisites");
- // FIXME: handle structured attrs
-
/* Rehash and write the derivation. FIXME: would be nice to use
'buildDerivation', but that's privileged. */
- auto drvName = drv.env["name"] + "-env";
+ auto drvName = std::string(drvPath.name());
+ assert(hasSuffix(drvName, ".drv"));
+ drvName.resize(drvName.size() - 4);
+ drvName += "-env";
for (auto & output : drv.outputs)
drv.env.erase(output.first);
drv.env["out"] = "";
drv.env["outputs"] = "out";
+ drv.inputSrcs.insert(std::move(getEnvShPath));
Hash h = hashDerivationModulo(*store, drv, true);
auto shellOutPath = store->makeOutputPath("out", h, drvName);
drv.outputs.insert_or_assign("out", DerivationOutput(shellOutPath.clone(), "", ""));
@@ -159,13 +172,19 @@ struct Common : InstallableCommand, MixProfile
void makeRcScript(const BuildEnvironment & buildEnvironment, std::ostream & out)
{
+ out << "unset shellHook\n";
+
out << "nix_saved_PATH=\"$PATH\"\n";
for (auto & i : buildEnvironment.env) {
if (!ignoreVars.count(i.first) && !hasPrefix(i.first, "BASH_")) {
- out << fmt("%s=%s\n", i.first, i.second.value);
- if (i.second.exported)
- out << fmt("export %s\n", i.first);
+ if (i.second.associative)
+ out << fmt("declare -A %s=(%s)\n", i.first, i.second.value);
+ else {
+ out << fmt("%s=%s\n", i.first, i.second.value);
+ if (i.second.exported)
+ out << fmt("export %s\n", i.first);
+ }
}
}
@@ -196,17 +215,19 @@ struct Common : InstallableCommand, MixProfile
auto & drvPath = *drvs.begin();
- return getDerivationEnvironment(store, store->derivationFromPath(drvPath));
+ return getDerivationEnvironment(store, drvPath);
}
}
- BuildEnvironment getBuildEnvironment(ref<Store> store)
+ std::pair<BuildEnvironment, std::string> getBuildEnvironment(ref<Store> store)
{
auto shellOutPath = getShellOutPath(store);
+ auto strPath = store->printStorePath(shellOutPath);
+
updateProfile(shellOutPath);
- return readEnvironment(store->printStorePath(shellOutPath));
+ return {readEnvironment(strPath), strPath};
}
};
@@ -216,16 +237,16 @@ struct CmdDevShell : Common, MixEnvironment
CmdDevShell()
{
- mkFlag()
- .longName("command")
- .shortName('c')
- .description("command and arguments to be executed instead of an interactive shell")
- .labels({"command", "args"})
- .arity(ArityAny)
- .handler([&](std::vector<std::string> ss) {
+ addFlag({
+ .longName = "command",
+ .shortName = 'c',
+ .description = "command and arguments to be executed instead of an interactive shell",
+ .labels = {"command", "args"},
+ .handler = {[&](std::vector<std::string> ss) {
if (ss.empty()) throw UsageError("--command requires at least one argument");
command = ss;
- });
+ }}
+ });
}
std::string description() override
@@ -253,7 +274,7 @@ struct CmdDevShell : Common, MixEnvironment
void run(ref<Store> store) override
{
- auto buildEnvironment = getBuildEnvironment(store);
+ auto [buildEnvironment, gcroot] = getBuildEnvironment(store);
auto [rcFileFd, rcFilePath] = createTempFile("nix-shell");
@@ -276,6 +297,8 @@ struct CmdDevShell : Common, MixEnvironment
auto shell = getEnv("SHELL").value_or("bash");
setEnviron();
+ // prevent garbage collection until shell exits
+ setenv("NIX_GCROOT", gcroot.data(), 1);
auto args = Strings{std::string(baseNameOf(shell)), "--rcfile", rcFilePath};
@@ -305,9 +328,11 @@ struct CmdPrintDevEnv : Common
};
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store) override
{
- auto buildEnvironment = getBuildEnvironment(store);
+ auto buildEnvironment = getBuildEnvironment(store).first;
stopProgressBar();
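The two new Var fields drive how makeRcScript re-emits each variable; pulled out on its own, the emission step is roughly (emitVar is an illustrative wrapper, not a function from the patch):

    // Associative arrays need 'declare -A'; everything else is a plain
    // assignment followed by an optional 'export'.
    static void emitVar(std::ostream & out, const std::string & name, const Var & var)
    {
        if (var.associative)
            out << fmt("declare -A %s=(%s)\n", name, var.value);
        else {
            out << fmt("%s=%s\n", name, var.value);
            if (var.exported)
                out << fmt("export %s\n", name);
        }
    }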
diff --git a/src/nix/doctor.cc b/src/nix/doctor.cc
index 0aa634d6e..82e92cdd0 100644
--- a/src/nix/doctor.cc
+++ b/src/nix/doctor.cc
@@ -40,9 +40,11 @@ struct CmdDoctor : StoreCommand
std::string description() override
{
- return "check your system for potential problems and print a PASS or FAIL for each check.";
+ return "check your system for potential problems and print a PASS or FAIL for each check";
}
+ Category category() override { return catNixInstallation; }
+
void run(ref<Store> store) override
{
logger->log("Running checks against store uri: " + store->getUri());
diff --git a/src/nix/dump-path.cc b/src/nix/dump-path.cc
index bb741b572..e1de71bf8 100644
--- a/src/nix/dump-path.cc
+++ b/src/nix/dump-path.cc
@@ -20,6 +20,8 @@ struct CmdDumpPath : StorePathCommand
};
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store, const StorePath & storePath) override
{
FdSink sink(STDOUT_FILENO);
diff --git a/src/nix/edit.cc b/src/nix/edit.cc
index 1683eada0..067d3a973 100644
--- a/src/nix/edit.cc
+++ b/src/nix/edit.cc
@@ -25,6 +25,8 @@ struct CmdEdit : InstallableCommand
};
}
+ Category category() override { return catSecondary; }
+
void run(ref<Store> store) override
{
auto state = getEvalState();
diff --git a/src/nix/eval.cc b/src/nix/eval.cc
index 6398fc58e..26e98ac2a 100644
--- a/src/nix/eval.cc
+++ b/src/nix/eval.cc
@@ -45,6 +45,8 @@ struct CmdEval : MixJSON, InstallableCommand
};
}
+ Category category() override { return catSecondary; }
+
void run(ref<Store> store) override
{
if (raw && json)
@@ -55,16 +57,15 @@ struct CmdEval : MixJSON, InstallableCommand
auto v = installable->toValue(*state).first;
PathSet context;
- stopProgressBar();
-
if (raw) {
+ stopProgressBar();
std::cout << state->coerceToString(noPos, *v, context);
} else if (json) {
JSONPlaceholder jsonOut(std::cout);
printValueAsJSON(*state, true, *v, jsonOut, context);
} else {
state->forceValueDeep(*v);
- std::cout << *v << "\n";
+ logger->stdout("%s", *v);
}
}
};
diff --git a/src/nix/get-env.sh b/src/nix/get-env.sh
new file mode 100644
index 000000000..a25ec43a9
--- /dev/null
+++ b/src/nix/get-env.sh
@@ -0,0 +1,9 @@
+set -e
+if [ -e .attrs.sh ]; then source .attrs.sh; fi
+export IN_NIX_SHELL=impure
+export dontAddDisableDepTrack=1
+if [[ -n $stdenv ]]; then
+ source $stdenv/setup
+fi
+export > $out
+set >> $out
diff --git a/src/nix/hash.cc b/src/nix/hash.cc
index 128708339..f668d5282 100644
--- a/src/nix/hash.cc
+++ b/src/nix/hash.cc
@@ -23,9 +23,7 @@ struct CmdHash : Command
mkFlag(0, "base64", "print hash in base-64", &base, Base64);
mkFlag(0, "base32", "print hash in base-32 (Nix-specific)", &base, Base32);
mkFlag(0, "base16", "print hash in base-16", &base, Base16);
- mkFlag()
- .longName("type")
- .mkHashTypeFlag(&ht);
+ addFlag(Flag::mkHashTypeFlag("type", &ht));
#if 0
mkFlag()
.longName("modulo")
@@ -43,6 +41,8 @@ struct CmdHash : Command
: "print cryptographic hash of the NAR serialisation of a path";
}
+ Category category() override { return catUtility; }
+
void run() override
{
for (auto path : paths) {
@@ -60,8 +60,7 @@ struct CmdHash : Command
Hash h = hashSink->finish().first;
if (truncate && h.hashSize > 20) h = compressHash(h, 20);
- std::cout << format("%1%\n") %
- h.to_string(base, base == SRI);
+ logger->stdout(h.to_string(base, base == SRI));
}
}
};
@@ -77,9 +76,7 @@ struct CmdToBase : Command
CmdToBase(Base base) : base(base)
{
- mkFlag()
- .longName("type")
- .mkHashTypeFlag(&ht);
+ addFlag(Flag::mkHashTypeFlag("type", &ht));
expectArgs("strings", &args);
}
@@ -92,10 +89,12 @@ struct CmdToBase : Command
"SRI");
}
+ Category category() override { return catUtility; }
+
void run() override
{
for (auto s : args)
- std::cout << fmt("%s\n", Hash(s, ht).to_string(base, base == SRI));
+ logger->stdout(Hash(s, ht).to_string(base, base == SRI));
}
};
diff --git a/src/nix/installables.cc b/src/nix/installables.cc
index f464d0aa1..937d69206 100644
--- a/src/nix/installables.cc
+++ b/src/nix/installables.cc
@@ -12,26 +12,28 @@
namespace nix {
+
SourceExprCommand::SourceExprCommand()
{
- mkFlag()
- .shortName('f')
- .longName("file")
- .label("file")
- .description("evaluate FILE rather than the default")
- .dest(&file);
+ addFlag({
+ .longName = "file",
+ .shortName = 'f',
+ .description = "evaluate FILE rather than the default",
+ .labels = {"file"},
+ .handler = {&file}
+ });
}
Value * SourceExprCommand::getSourceExpr(EvalState & state)
{
- if (vSourceExpr) return vSourceExpr;
+ if (vSourceExpr) return *vSourceExpr;
auto sToplevel = state.symbols.create("_toplevel");
- vSourceExpr = state.allocValue();
+ vSourceExpr = allocRootValue(state.allocValue());
if (file != "")
- state.evalFile(lookupFileArg(state, file), *vSourceExpr);
+ state.evalFile(lookupFileArg(state, file), **vSourceExpr);
else {
@@ -39,9 +41,9 @@ Value * SourceExprCommand::getSourceExpr(EvalState & state)
auto searchPath = state.getSearchPath();
- state.mkAttrs(*vSourceExpr, 1024);
+ state.mkAttrs(**vSourceExpr, 1024);
- mkBool(*state.allocAttr(*vSourceExpr, sToplevel), true);
+ mkBool(*state.allocAttr(**vSourceExpr, sToplevel), true);
std::unordered_set<std::string> seen;
@@ -52,7 +54,7 @@ Value * SourceExprCommand::getSourceExpr(EvalState & state)
mkPrimOpApp(*v1, state.getBuiltin("findFile"), state.getBuiltin("nixPath"));
Value * v2 = state.allocValue();
mkApp(*v2, *v1, mkString(*state.allocValue(), name));
- mkApp(*state.allocAttr(*vSourceExpr, state.symbols.create(name)),
+ mkApp(*state.allocAttr(**vSourceExpr, state.symbols.create(name)),
state.getBuiltin("import"), *v2);
};
@@ -66,10 +68,10 @@ Value * SourceExprCommand::getSourceExpr(EvalState & state)
} else
addEntry(i.first);
- vSourceExpr->attrs->sort();
+ (*vSourceExpr)->attrs->sort();
}
- return vSourceExpr;
+ return *vSourceExpr;
}
ref<EvalState> SourceExprCommand::getEvalState()
diff --git a/src/nix/local.mk b/src/nix/local.mk
index 50a18efd7..8c0eed19e 100644
--- a/src/nix/local.mk
+++ b/src/nix/local.mk
@@ -15,9 +15,9 @@ nix_SOURCES := \
$(wildcard src/nix-prefetch-url/*.cc) \
$(wildcard src/nix-store/*.cc) \
-nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain
+nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libexpr -I src/libmain
-nix_LIBS = libexpr libmain libstore libutil libnixrust
+nix_LIBS = libexpr libmain libfetchers libstore libutil libnixrust
nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) -lboost_context -lboost_thread -lboost_system
@@ -27,3 +27,5 @@ $(foreach name, \
$(eval $(call install-symlink, $(bindir)/nix, $(libexecdir)/nix/build-remote))
src/nix-env/user-env.cc: src/nix-env/buildenv.nix.gen.hh
+
+src/nix/dev-shell.cc: src/nix/get-env.sh.gen.hh
diff --git a/src/nix/log.cc b/src/nix/log.cc
index 795991cb7..3fe22f6c2 100644
--- a/src/nix/log.cc
+++ b/src/nix/log.cc
@@ -31,6 +31,8 @@ struct CmdLog : InstallableCommand
};
}
+ Category category() override { return catSecondary; }
+
void run(ref<Store> store) override
{
settings.readOnlyMode = true;
diff --git a/src/nix/ls.cc b/src/nix/ls.cc
index 5d55bd4d5..d2157f2d4 100644
--- a/src/nix/ls.cc
+++ b/src/nix/ls.cc
@@ -34,16 +34,14 @@ struct MixLs : virtual Args, MixJSON
(st.isExecutable ? "-r-xr-xr-x" : "-r--r--r--") :
st.type == FSAccessor::Type::tSymlink ? "lrwxrwxrwx" :
"dr-xr-xr-x";
- std::cout <<
- (format("%s %20d %s") % tp % st.fileSize % relPath);
+ auto line = fmt("%s %20d %s", tp, st.fileSize, relPath);
if (st.type == FSAccessor::Type::tSymlink)
- std::cout << " -> " << accessor->readLink(curPath)
- ;
- std::cout << "\n";
+ line += " -> " + accessor->readLink(curPath);
+ logger->stdout(line);
if (recursive && st.type == FSAccessor::Type::tDirectory)
doPath(st, curPath, relPath, false);
} else {
- std::cout << relPath << "\n";
+ logger->stdout(relPath);
if (recursive) {
auto st = accessor->stat(curPath);
if (st.type == FSAccessor::Type::tDirectory)
@@ -102,9 +100,11 @@ struct CmdLsStore : StoreCommand, MixLs
std::string description() override
{
- return "show information about a store path";
+ return "show information about a path in the Nix store";
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store) override
{
list(store->getFSAccessor());
@@ -133,12 +133,14 @@ struct CmdLsNar : Command, MixLs
std::string description() override
{
- return "show information about the contents of a NAR file";
+ return "show information about a path inside a NAR file";
}
+ Category category() override { return catUtility; }
+
void run() override
{
- list(makeNarAccessor(make_ref<std::string>(readFile(narPath, true))));
+ list(makeNarAccessor(make_ref<std::string>(readFile(narPath))));
}
};
diff --git a/src/nix/main.cc b/src/nix/main.cc
index 3b5f5516f..5cf09c4f0 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -8,7 +8,7 @@
#include "shared.hh"
#include "store-api.hh"
#include "progress-bar.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "finally.hh"
#include <sys/types.h>
@@ -59,15 +59,22 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
NixArgs() : MultiCommand(*RegisterCommand::commands), MixCommonArgs("nix")
{
- mkFlag()
- .longName("help")
- .description("show usage information")
- .handler([&]() { showHelpAndExit(); });
-
- mkFlag()
- .longName("help-config")
- .description("show configuration options")
- .handler([&]() {
+ categories.clear();
+ categories[Command::catDefault] = "Main commands";
+ categories[catSecondary] = "Infrequently used commands";
+ categories[catUtility] = "Utility/scripting commands";
+ categories[catNixInstallation] = "Commands for upgrading or troubleshooting your Nix installation";
+
+ addFlag({
+ .longName = "help",
+ .description = "show usage information",
+ .handler = {[&]() { showHelpAndExit(); }},
+ });
+
+ addFlag({
+ .longName = "help-config",
+ .description = "show configuration options",
+ .handler = {[&]() {
std::cout << "The following configuration options are available:\n\n";
Table2 tbl;
std::map<std::string, Config::SettingInfo> settings;
@@ -76,28 +83,33 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
tbl.emplace_back(s.first, s.second.description);
printTable(std::cout, tbl);
throw Exit();
- });
-
- mkFlag()
- .longName("print-build-logs")
- .shortName('L')
- .description("print full build logs on stderr")
- .set(&printBuildLogs, true);
-
- mkFlag()
- .longName("version")
- .description("show version information")
- .handler([&]() { printVersion(programName); });
-
- mkFlag()
- .longName("no-net")
- .description("disable substituters and consider all previously downloaded files up-to-date")
- .handler([&]() { useNet = false; });
-
- mkFlag()
- .longName("refresh")
- .description("consider all previously downloaded files out-of-date")
- .handler([&]() { refresh = true; });
+ }},
+ });
+
+ addFlag({
+ .longName = "print-build-logs",
+ .shortName = 'L',
+ .description = "print full build logs on stderr",
+ .handler = {&printBuildLogs, true},
+ });
+
+ addFlag({
+ .longName = "version",
+ .description = "show version information",
+ .handler = {[&]() { printVersion(programName); }},
+ });
+
+ addFlag({
+ .longName = "no-net",
+ .description = "disable substituters and consider all previously downloaded files up-to-date",
+ .handler = {[&]() { useNet = false; }},
+ });
+
+ addFlag({
+ .longName = "refresh",
+ .description = "consider all previously downloaded files out-of-date",
+ .handler = {[&]() { refresh = true; }},
+ });
}
void printFlags(std::ostream & out) override
@@ -105,8 +117,8 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
Args::printFlags(out);
std::cout <<
"\n"
- "In addition, most configuration settings can be overriden using '--<name> <value>'.\n"
- "Boolean settings can be overriden using '--<name>' or '--no-<name>'. See 'nix\n"
+ "In addition, most configuration settings can be overriden using '--" ANSI_ITALIC "name value" ANSI_NORMAL "'.\n"
+ "Boolean settings can be overriden using '--" ANSI_ITALIC "name" ANSI_NORMAL "' or '--no-" ANSI_ITALIC "name" ANSI_NORMAL "'. See 'nix\n"
"--help-config' for a list of configuration settings.\n";
}
@@ -115,10 +127,10 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
MultiCommand::printHelp(programName, out);
#if 0
- out << "\nFor full documentation, run 'man " << programName << "' or 'man " << programName << "-<COMMAND>'.\n";
+ out << "\nFor full documentation, run 'man " << programName << "' or 'man " << programName << "-" ANSI_ITALIC "COMMAND" ANSI_NORMAL "'.\n";
#endif
- std::cout << "\nNote: this program is EXPERIMENTAL and subject to change.\n";
+ std::cout << "\nNote: this program is " ANSI_RED "EXPERIMENTAL" ANSI_NORMAL " and subject to change.\n";
}
void showHelpAndExit()
@@ -176,17 +188,17 @@ void mainWrapped(int argc, char * * argv)
settings.useSubstitutes = false;
if (!settings.tarballTtl.overriden)
settings.tarballTtl = std::numeric_limits<unsigned int>::max();
- if (!downloadSettings.tries.overriden)
- downloadSettings.tries = 0;
- if (!downloadSettings.connectTimeout.overriden)
- downloadSettings.connectTimeout = 1;
+ if (!fileTransferSettings.tries.overriden)
+ fileTransferSettings.tries = 0;
+ if (!fileTransferSettings.connectTimeout.overriden)
+ fileTransferSettings.connectTimeout = 1;
}
if (args.refresh)
settings.tarballTtl = 0;
- args.command->prepare();
- args.command->run();
+ args.command->second->prepare();
+ args.command->second->run();
}
}
diff --git a/src/nix/make-content-addressable.cc b/src/nix/make-content-addressable.cc
index f9c7fef3f..8803461f4 100644
--- a/src/nix/make-content-addressable.cc
+++ b/src/nix/make-content-addressable.cc
@@ -31,6 +31,9 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
},
};
}
+
+ Category category() override { return catUtility; }
+
void run(ref<Store> store, StorePaths storePaths) override
{
auto paths = store->topoSortPaths(storePathsToSet(storePaths));
diff --git a/src/nix/optimise-store.cc b/src/nix/optimise-store.cc
index fed012b04..b45951879 100644
--- a/src/nix/optimise-store.cc
+++ b/src/nix/optimise-store.cc
@@ -23,6 +23,8 @@ struct CmdOptimiseStore : StoreCommand
};
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store) override
{
store->optimiseStore();
diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc
index 45ec297d2..88d7fffd4 100644
--- a/src/nix/path-info.cc
+++ b/src/nix/path-info.cc
@@ -29,6 +29,8 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
return "query information about store paths";
}
+ Category category() override { return catSecondary; }
+
Examples examples() override
{
return {
diff --git a/src/nix/ping-store.cc b/src/nix/ping-store.cc
index 3a2e542a3..127397a29 100644
--- a/src/nix/ping-store.cc
+++ b/src/nix/ping-store.cc
@@ -21,6 +21,8 @@ struct CmdPingStore : StoreCommand
};
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store) override
{
store->connect();
diff --git a/src/nix/progress-bar.cc b/src/nix/progress-bar.cc
index b1226d550..2fe26a001 100644
--- a/src/nix/progress-bar.cc
+++ b/src/nix/progress-bar.cc
@@ -7,6 +7,7 @@
#include <atomic>
#include <map>
#include <thread>
+#include <iostream>
namespace nix {
@@ -200,8 +201,8 @@ public:
i->s = fmt("querying " ANSI_BOLD "%s" ANSI_NORMAL " on %s", name, getS(fields, 1));
}
- if ((type == actDownload && hasAncestor(*state, actCopyPath, parent))
- || (type == actDownload && hasAncestor(*state, actQueryPathInfo, parent))
+ if ((type == actFileTransfer && hasAncestor(*state, actCopyPath, parent))
+ || (type == actFileTransfer && hasAncestor(*state, actQueryPathInfo, parent))
|| (type == actCopyPath && hasAncestor(*state, actSubstitute, parent)))
i->visible = false;
@@ -426,7 +427,7 @@ public:
if (!s2.empty()) { res += " ("; res += s2; res += ')'; }
}
- showActivity(actDownload, "%s MiB DL", "%.1f", MiB);
+ showActivity(actFileTransfer, "%s MiB DL", "%.1f", MiB);
{
auto s = renderActivity(actOptimiseStore, "%s paths optimised");
@@ -452,6 +453,18 @@ public:
return res;
}
+
+ void writeToStdout(std::string_view s) override
+ {
+ auto state(state_.lock());
+ if (state->active) {
+ std::cerr << "\r\e[K";
+ Logger::writeToStdout(s);
+ draw(*state);
+ } else {
+ Logger::writeToStdout(s);
+ }
+ }
};
void startProgressBar(bool printBuildLogs)
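The logger->stdout() calls introduced throughout the commands exist so that this override can wipe the progress-bar line before printing and redraw it afterwards. The calling convention is just a format string plus arguments, with the logger supplying the trailing newline (printSetting and its include are illustrative):

    #include "logging.hh"   // assumption: declares 'logger' and Logger::stdout

    static void printSetting(const std::string & key, const std::string & value)
    {
        // was: std::cout << key + " = " + value + "\n";
        nix::logger->stdout("%s = %s", key, value);
    }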
diff --git a/src/nix/repl.cc b/src/nix/repl.cc
index c770fd4a4..4bcaaeebf 100644
--- a/src/nix/repl.cc
+++ b/src/nix/repl.cc
@@ -82,40 +82,6 @@ struct NixRepl : gc
};
-void printHelp()
-{
- std::cout
- << "Usage: nix-repl [--help] [--version] [-I path] paths...\n"
- << "\n"
- << "nix-repl is a simple read-eval-print loop (REPL) for the Nix package manager.\n"
- << "\n"
- << "Options:\n"
- << " --help\n"
- << " Prints out a summary of the command syntax and exits.\n"
- << "\n"
- << " --version\n"
- << " Prints out the Nix version number on standard output and exits.\n"
- << "\n"
- << " -I path\n"
- << " Add a path to the Nix expression search path. This option may be given\n"
- << " multiple times. See the NIX_PATH environment variable for information on\n"
- << " the semantics of the Nix search path. Paths added through -I take\n"
- << " precedence over NIX_PATH.\n"
- << "\n"
- << " paths...\n"
- << " A list of paths to files containing Nix expressions which nix-repl will\n"
- << " load and add to its scope.\n"
- << "\n"
- << " A path surrounded in < and > will be looked up in the Nix expression search\n"
- << " path, as in the Nix language itself.\n"
- << "\n"
- << " If an element of paths starts with http:// or https://, it is interpreted\n"
- << " as the URL of a tarball that will be downloaded and unpacked to a temporary\n"
- << " location. The tarball must include a single top-level directory containing\n"
- << " at least a file named default.nix.\n";
-}
-
-
string removeWhitespace(string s)
{
s = chomp(s);
@@ -809,6 +775,16 @@ struct CmdRepl : StoreCommand, MixEvalArgs
return "start an interactive environment for evaluating Nix expressions";
}
+ Examples examples() override
+ {
+ return {
+ Example{
+ "Display all special commands within the REPL:",
+ "nix repl\n nix-repl> :?"
+ }
+ };
+ }
+
void run(ref<Store> store) override
{
auto repl = std::make_unique<NixRepl>(searchPath, openStore());
diff --git a/src/nix/run.cc b/src/nix/run.cc
index d790979a4..c9b69aec7 100644
--- a/src/nix/run.cc
+++ b/src/nix/run.cc
@@ -57,22 +57,22 @@ struct RunCommon : virtual Command
}
};
-struct CmdRun : InstallablesCommand, RunCommon, MixEnvironment
+struct CmdShell : InstallablesCommand, RunCommon, MixEnvironment
{
- std::vector<std::string> command = { "bash" };
+ std::vector<std::string> command = { getEnv("SHELL").value_or("bash") };
- CmdRun()
+ CmdShell()
{
- mkFlag()
- .longName("command")
- .shortName('c')
- .description("command and arguments to be executed; defaults to 'bash'")
- .labels({"command", "args"})
- .arity(ArityAny)
- .handler([&](std::vector<std::string> ss) {
+ addFlag({
+ .longName = "command",
+ .shortName = 'c',
+ .description = "command and arguments to be executed; defaults to '$SHELL'",
+ .labels = {"command", "args"},
+ .handler = {[&](std::vector<std::string> ss) {
if (ss.empty()) throw UsageError("--command requires at least one argument");
command = ss;
- });
+ }}
+ });
}
std::string description() override
@@ -85,19 +85,19 @@ struct CmdRun : InstallablesCommand, RunCommon, MixEnvironment
return {
Example{
"To start a shell providing GNU Hello from NixOS 17.03:",
- "nix run -f channel:nixos-17.03 hello"
+ "nix shell -f channel:nixos-17.03 hello"
},
Example{
"To start a shell providing youtube-dl from your 'nixpkgs' channel:",
- "nix run nixpkgs.youtube-dl"
+ "nix shell nixpkgs.youtube-dl"
},
Example{
"To run GNU Hello:",
- "nix run nixpkgs.hello -c hello --greeting 'Hi everybody!'"
+ "nix shell nixpkgs.hello -c hello --greeting 'Hi everybody!'"
},
Example{
"To run GNU Hello in a chroot store:",
- "nix run --store ~/my-nix nixpkgs.hello -c hello"
+ "nix shell --store ~/my-nix nixpkgs.hello -c hello"
},
};
}
@@ -141,7 +141,7 @@ struct CmdRun : InstallablesCommand, RunCommon, MixEnvironment
}
};
-static auto r1 = registerCommand<CmdRun>("run");
+static auto r1 = registerCommand<CmdShell>("shell");
void chrootHelper(int argc, char * * argv)
{
diff --git a/src/nix/search.cc b/src/nix/search.cc
index 769274543..ba72c1e79 100644
--- a/src/nix/search.cc
+++ b/src/nix/search.cc
@@ -40,16 +40,18 @@ struct CmdSearch : SourceExprCommand, MixJSON
{
expectArgs("regex", &res);
- mkFlag()
- .longName("update-cache")
- .shortName('u')
- .description("update the package search cache")
- .handler([&]() { writeCache = true; useCache = false; });
-
- mkFlag()
- .longName("no-cache")
- .description("do not use or update the package search cache")
- .handler([&]() { writeCache = false; useCache = false; });
+ addFlag({
+ .longName = "update-cache",
+ .shortName = 'u',
+ .description = "update the package search cache",
+ .handler = {[&]() { writeCache = true; useCache = false; }}
+ });
+
+ addFlag({
+ .longName = "no-cache",
+ .description = "do not use or update the package search cache",
+ .handler = {[&]() { writeCache = false; useCache = false; }}
+ });
}
std::string description() override
@@ -263,7 +265,7 @@ struct CmdSearch : SourceExprCommand, MixJSON
throw SysError("cannot rename '%s' to '%s'", tmpFile, jsonCacheFileName);
}
- if (results.size() == 0)
+ if (!json && results.size() == 0)
throw Error("no results for the given search term(s)!");
RunPager pager;
diff --git a/src/nix/show-config.cc b/src/nix/show-config.cc
index 87544f937..4fd8886de 100644
--- a/src/nix/show-config.cc
+++ b/src/nix/show-config.cc
@@ -13,6 +13,8 @@ struct CmdShowConfig : Command, MixJSON
return "show the Nix configuration";
}
+ Category category() override { return catUtility; }
+
void run() override
{
if (json) {
@@ -23,7 +25,7 @@ struct CmdShowConfig : Command, MixJSON
std::map<std::string, Config::SettingInfo> settings;
globalConfig.getSettings(settings);
for (auto & s : settings)
- std::cout << s.first + " = " + s.second.value + "\n";
+ logger->stdout("%s = %s", s.first, s.second.value);
}
}
};
diff --git a/src/nix/show-derivation.cc b/src/nix/show-derivation.cc
index 0ede7b468..22c569f3c 100644
--- a/src/nix/show-derivation.cc
+++ b/src/nix/show-derivation.cc
@@ -15,11 +15,12 @@ struct CmdShowDerivation : InstallablesCommand
CmdShowDerivation()
{
- mkFlag()
- .longName("recursive")
- .shortName('r')
- .description("include the dependencies of the specified derivations")
- .set(&recursive, true);
+ addFlag({
+ .longName = "recursive",
+ .shortName = 'r',
+ .description = "include the dependencies of the specified derivations",
+ .handler = {&recursive, true}
+ });
}
std::string description() override
@@ -41,6 +42,8 @@ struct CmdShowDerivation : InstallablesCommand
};
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store) override
{
auto drvPaths = toDerivations(store, installables, true);
diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc
index 5f07448e0..6c9b9a792 100644
--- a/src/nix/sigs.cc
+++ b/src/nix/sigs.cc
@@ -13,13 +13,13 @@ struct CmdCopySigs : StorePathsCommand
CmdCopySigs()
{
- mkFlag()
- .longName("substituter")
- .shortName('s')
- .labels({"store-uri"})
- .description("use signatures from specified store")
- .arity(1)
- .handler([&](std::vector<std::string> ss) { substituterUris.push_back(ss[0]); });
+ addFlag({
+ .longName = "substituter",
+ .shortName = 's',
+ .description = "use signatures from specified store",
+ .labels = {"store-uri"},
+ .handler = {[&](std::string s) { substituterUris.push_back(s); }},
+ });
}
std::string description() override
@@ -27,6 +27,8 @@ struct CmdCopySigs : StorePathsCommand
return "copy path signatures from substituters (like binary caches)";
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store, StorePaths storePaths) override
{
if (substituterUris.empty())
@@ -98,12 +100,13 @@ struct CmdSignPaths : StorePathsCommand
CmdSignPaths()
{
- mkFlag()
- .shortName('k')
- .longName("key-file")
- .label("file")
- .description("file containing the secret signing key")
- .dest(&secretKeyFile);
+ addFlag({
+ .longName = "key-file",
+ .shortName = 'k',
+ .description = "file containing the secret signing key",
+ .labels = {"file"},
+ .handler = {&secretKeyFile}
+ });
}
std::string description() override
@@ -111,6 +114,8 @@ struct CmdSignPaths : StorePathsCommand
return "sign the specified paths";
}
+ Category category() override { return catUtility; }
+
void run(ref<Store> store, StorePaths storePaths) override
{
if (secretKeyFile.empty())
diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc
index 831a83fd1..9e7ebcd9c 100644
--- a/src/nix/upgrade-nix.cc
+++ b/src/nix/upgrade-nix.cc
@@ -1,7 +1,7 @@
#include "command.hh"
#include "common-args.hh"
#include "store-api.hh"
-#include "download.hh"
+#include "filetransfer.hh"
#include "eval.hh"
#include "attr-path.hh"
#include "names.hh"
@@ -16,18 +16,20 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
CmdUpgradeNix()
{
- mkFlag()
- .longName("profile")
- .shortName('p')
- .labels({"profile-dir"})
- .description("the Nix profile to upgrade")
- .dest(&profileDir);
-
- mkFlag()
- .longName("nix-store-paths-url")
- .labels({"url"})
- .description("URL of the file that contains the store paths of the latest Nix release")
- .dest(&storePathsUrl);
+ addFlag({
+ .longName = "profile",
+ .shortName = 'p',
+ .description = "the Nix profile to upgrade",
+ .labels = {"profile-dir"},
+ .handler = {&profileDir}
+ });
+
+ addFlag({
+ .longName = "nix-store-paths-url",
+ .description = "URL of the file that contains the store paths of the latest Nix release",
+ .labels = {"url"},
+ .handler = {&storePathsUrl}
+ });
}
std::string description() override
@@ -49,6 +51,8 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
};
}
+ Category category() override { return catNixInstallation; }
+
void run(ref<Store> store) override
{
evalSettings.pureEval = true;
@@ -142,8 +146,8 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
Activity act(*logger, lvlInfo, actUnknown, "querying latest Nix version");
// FIXME: use nixos.org?
- auto req = DownloadRequest(storePathsUrl);
- auto res = getDownloader()->download(req);
+ auto req = FileTransferRequest(storePathsUrl);
+ auto res = getFileTransfer()->download(req);
auto state = std::make_unique<EvalState>(Strings(), store);
auto v = state->allocValue();
diff --git a/src/nix/verify.cc b/src/nix/verify.cc
index f53217239..8c845bfc2 100644
--- a/src/nix/verify.cc
+++ b/src/nix/verify.cc
@@ -20,13 +20,13 @@ struct CmdVerify : StorePathsCommand
{
mkFlag(0, "no-contents", "do not verify the contents of each store path", &noContents);
mkFlag(0, "no-trust", "do not verify whether each store path is trusted", &noTrust);
- mkFlag()
- .longName("substituter")
- .shortName('s')
- .labels({"store-uri"})
- .description("use signatures from specified store")
- .arity(1)
- .handler([&](std::vector<std::string> ss) { substituterUris.push_back(ss[0]); });
+ addFlag({
+ .longName = "substituter",
+ .shortName = 's',
+ .description = "use signatures from specified store",
+ .labels = {"store-uri"},
+ .handler = {[&](std::string s) { substituterUris.push_back(s); }}
+ });
mkIntFlag('n', "sigs-needed", "require that each path has at least N valid signatures", &sigsNeeded);
}
@@ -49,6 +49,8 @@ struct CmdVerify : StorePathsCommand
};
}
+ Category category() override { return catSecondary; }
+
void run(ref<Store> store, StorePaths storePaths) override
{
std::vector<ref<Store>> substituters;
diff --git a/src/nix/why-depends.cc b/src/nix/why-depends.cc
index d3b7a674a..6057beedb 100644
--- a/src/nix/why-depends.cc
+++ b/src/nix/why-depends.cc
@@ -37,11 +37,12 @@ struct CmdWhyDepends : SourceExprCommand
expectArg("package", &_package);
expectArg("dependency", &_dependency);
- mkFlag()
- .longName("all")
- .shortName('a')
- .description("show all edges in the dependency graph leading from 'package' to 'dependency', rather than just a shortest path")
- .set(&all, true);
+ addFlag({
+ .longName = "all",
+ .shortName = 'a',
+ .description = "show all edges in the dependency graph leading from 'package' to 'dependency', rather than just a shortest path",
+ .handler = {&all, true},
+ });
}
std::string description() override
@@ -67,6 +68,8 @@ struct CmdWhyDepends : SourceExprCommand
};
}
+ Category category() override { return catSecondary; }
+
void run(ref<Store> store) override
{
auto package = parseInstallable(*this, store, _package, false);
@@ -149,7 +152,7 @@ struct CmdWhyDepends : SourceExprCommand
auto pathS = store->printStorePath(node.path);
assert(node.dist != inf);
- std::cout << fmt("%s%s%s%s" ANSI_NORMAL "\n",
+ logger->stdout("%s%s%s%s" ANSI_NORMAL,
firstPad,
node.visited ? "\e[38;5;244m" : "",
firstPad != "" ? "→ " : "",
diff --git a/tests/check.nix b/tests/check.nix
index 56c82e565..bca04fdaf 100644
--- a/tests/check.nix
+++ b/tests/check.nix
@@ -1,12 +1,45 @@
+{checkBuildId ? 0}:
+
with import ./config.nix;
{
nondeterministic = mkDerivation {
+ inherit checkBuildId;
name = "nondeterministic";
buildCommand =
''
mkdir $out
date +%s.%N > $out/date
+ echo "CHECK_TMPDIR=$TMPDIR"
+ echo "checkBuildId=$checkBuildId"
+ echo "$checkBuildId" > $TMPDIR/checkBuildId
+ '';
+ };
+
+ deterministic = mkDerivation {
+ inherit checkBuildId;
+ name = "deterministic";
+ buildCommand =
+ ''
+ mkdir $out
+ echo date > $out/date
+ echo "CHECK_TMPDIR=$TMPDIR"
+ echo "checkBuildId=$checkBuildId"
+ echo "$checkBuildId" > $TMPDIR/checkBuildId
+ '';
+ };
+
+ failed = mkDerivation {
+ inherit checkBuildId;
+ name = "failed";
+ buildCommand =
+ ''
+ mkdir $out
+ echo date > $out/date
+ echo "CHECK_TMPDIR=$TMPDIR"
+ echo "checkBuildId=$checkBuildId"
+ echo "$checkBuildId" > $TMPDIR/checkBuildId
+ false
'';
};
diff --git a/tests/check.sh b/tests/check.sh
index bc23a6634..5f25d04cb 100644
--- a/tests/check.sh
+++ b/tests/check.sh
@@ -1,14 +1,57 @@
source common.sh
+checkBuildTempDirRemoved ()
+{
+ buildDir=$(sed -n 's/CHECK_TMPDIR=//p' $1 | head -1)
+ checkBuildIdFile=${buildDir}/checkBuildId
+ [[ ! -f $checkBuildIdFile ]] || ! grep $checkBuildId $checkBuildIdFile
+}
+
+# written to build temp directories to verify they were created by this instance
+checkBuildId=$(date +%s%N)
+
clearStore
nix-build dependencies.nix --no-out-link
nix-build dependencies.nix --no-out-link --check
-nix-build check.nix -A nondeterministic --no-out-link
-nix-build check.nix -A nondeterministic --no-out-link --check 2> $TEST_ROOT/log || status=$?
+# check for dangling temporary build directories
+# only retain if build fails and --keep-failed is specified, or...
+# ...build is non-deterministic and --check and --keep-failed are both specified
+nix-build check.nix -A failed --argstr checkBuildId $checkBuildId \
+ --no-out-link 2> $TEST_ROOT/log || status=$?
+[ "$status" = "100" ]
+checkBuildTempDirRemoved $TEST_ROOT/log
+
+nix-build check.nix -A failed --argstr checkBuildId $checkBuildId \
+ --no-out-link --keep-failed 2> $TEST_ROOT/log || status=$?
+[ "$status" = "100" ]
+if checkBuildTempDirRemoved $TEST_ROOT/log; then false; fi
+
+nix-build check.nix -A deterministic --argstr checkBuildId $checkBuildId \
+ --no-out-link 2> $TEST_ROOT/log
+checkBuildTempDirRemoved $TEST_ROOT/log
+
+nix-build check.nix -A deterministic --argstr checkBuildId $checkBuildId \
+ --no-out-link --check --keep-failed 2> $TEST_ROOT/log
+if grep -q 'may not be deterministic' $TEST_ROOT/log; then false; fi
+checkBuildTempDirRemoved $TEST_ROOT/log
+
+nix-build check.nix -A nondeterministic --argstr checkBuildId $checkBuildId \
+ --no-out-link 2> $TEST_ROOT/log
+checkBuildTempDirRemoved $TEST_ROOT/log
+
+nix-build check.nix -A nondeterministic --argstr checkBuildId $checkBuildId \
+ --no-out-link --check 2> $TEST_ROOT/log || status=$?
+grep 'may not be deterministic' $TEST_ROOT/log
+[ "$status" = "104" ]
+checkBuildTempDirRemoved $TEST_ROOT/log
+
+nix-build check.nix -A nondeterministic --argstr checkBuildId $checkBuildId \
+ --no-out-link --check --keep-failed 2> $TEST_ROOT/log || status=$?
grep 'may not be deterministic' $TEST_ROOT/log
[ "$status" = "104" ]
+if checkBuildTempDirRemoved $TEST_ROOT/log; then false; fi
clearStore
diff --git a/tests/common.sh.in b/tests/common.sh.in
index 15d7b1ef9..dd7e61822 100644
--- a/tests/common.sh.in
+++ b/tests/common.sh.in
@@ -11,6 +11,7 @@ export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
export NIX_STATE_DIR=$TEST_ROOT/var/nix
export NIX_CONF_DIR=$TEST_ROOT/etc
+unset NIX_USER_CONF_FILES
export _NIX_TEST_SHARED=$TEST_ROOT/shared
if [[ -n $NIX_STORE ]]; then
export _NIX_TEST_NO_SANDBOX=1
@@ -21,6 +22,8 @@ export NIX_REMOTE=$NIX_REMOTE_
unset NIX_PATH
export TEST_HOME=$TEST_ROOT/test-home
export HOME=$TEST_HOME
+unset XDG_CONFIG_HOME
+unset XDG_CONFIG_DIRS
unset XDG_CACHE_HOME
mkdir -p $TEST_HOME
diff --git a/tests/config.sh b/tests/config.sh
new file mode 100644
index 000000000..8fa349f11
--- /dev/null
+++ b/tests/config.sh
@@ -0,0 +1,18 @@
+source common.sh
+
+# Test that files are loaded from XDG by default
+export XDG_CONFIG_HOME=/tmp/home
+export XDG_CONFIG_DIRS=/tmp/dir1:/tmp/dir2
+files=$(nix-build --verbose --version | grep "User config" | cut -d ':' -f2- | xargs)
+[[ $files == "/tmp/home/nix/nix.conf:/tmp/dir1/nix/nix.conf:/tmp/dir2/nix/nix.conf" ]]
+
+# Test that setting NIX_USER_CONF_FILES overrides all the default user config files
+export NIX_USER_CONF_FILES=/tmp/file1.conf:/tmp/file2.conf
+files=$(nix-build --verbose --version | grep "User config" | cut -d ':' -f2- | xargs)
+[[ $files == "/tmp/file1.conf:/tmp/file2.conf" ]]
+
+# Test that it's possible to load the config from a custom location
+here=$(readlink -f "$(dirname "${BASH_SOURCE[0]}")")
+export NIX_USER_CONF_FILES=$here/config/nix-with-substituters.conf
+var=$(nix show-config | grep '^substituters =' | cut -d '=' -f 2 | xargs)
+[[ $var == https://example.com ]]
diff --git a/tests/config/nix-with-substituters.conf b/tests/config/nix-with-substituters.conf
new file mode 100644
index 000000000..90f359a6f
--- /dev/null
+++ b/tests/config/nix-with-substituters.conf
@@ -0,0 +1,2 @@
+experimental-features = nix-command
+substituters = https://example.com
diff --git a/tests/fetchGit.sh b/tests/fetchGit.sh
index ed8fa14d6..d9c9874f5 100644
--- a/tests/fetchGit.sh
+++ b/tests/fetchGit.sh
@@ -11,7 +11,7 @@ repo=$TEST_ROOT/git
export _NIX_FORCE_HTTP=1
-rm -rf $repo ${repo}-tmp $TEST_HOME/.cache/nix/gitv2
+rm -rf $repo ${repo}-tmp $TEST_HOME/.cache/nix $TEST_ROOT/worktree $TEST_ROOT/shallow
git init $repo
git -C $repo config user.email "foobar@example.com"
@@ -25,8 +25,16 @@ rev1=$(git -C $repo rev-parse HEAD)
echo world > $repo/hello
git -C $repo commit -m 'Bla2' -a
+git -C $repo worktree add $TEST_ROOT/worktree
+echo hello >> $TEST_ROOT/worktree/hello
rev2=$(git -C $repo rev-parse HEAD)
+# Fetch a worktree
+unset _NIX_FORCE_HTTP
+path0=$(nix eval --raw "(builtins.fetchGit file://$TEST_ROOT/worktree).outPath")
+export _NIX_FORCE_HTTP=1
+[[ $(tail -n 1 $path0/hello) = "hello" ]]
+
# Fetch the default branch.
path=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath")
[[ $(cat $path/hello) = world ]]
@@ -50,9 +58,6 @@ path2=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath")
[[ $(nix eval "(builtins.fetchGit file://$repo).revCount") = 2 ]]
[[ $(nix eval --raw "(builtins.fetchGit file://$repo).rev") = $rev2 ]]
-# But with TTL 0, it should fail.
-(! nix eval --tarball-ttl 0 "(builtins.fetchGit file://$repo)" -vvvvv)
-
# Fetching with an explicit hash should succeed.
path2=$(nix eval --tarball-ttl 0 --raw "(builtins.fetchGit { url = file://$repo; rev = \"$rev2\"; }).outPath")
[[ $path = $path2 ]]
@@ -74,6 +79,7 @@ echo bar > $repo/dir2/bar
git -C $repo add dir1/foo
git -C $repo rm hello
+unset _NIX_FORCE_HTTP
path2=$(nix eval --raw "(builtins.fetchGit $repo).outPath")
[ ! -e $path2/hello ]
[ ! -e $path2/bar ]
@@ -110,9 +116,9 @@ path=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath")
git -C $repo checkout $rev2 -b dev
echo dev > $repo/hello
-# File URI uses 'master' unless specified otherwise
+# File URI uses dirty tree unless specified otherwise
path2=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath")
-[[ $path = $path2 ]]
+[ $(cat $path2/hello) = dev ]
# Using local path with branch other than 'master' should work when clean or dirty
path3=$(nix eval --raw "(builtins.fetchGit $repo).outPath")
@@ -131,9 +137,9 @@ path5=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outP
# Nuke the cache
-rm -rf $TEST_HOME/.cache/nix/gitv2
+rm -rf $TEST_HOME/.cache/nix
-# Try again, but without 'git' on PATH
+# Try again, but without 'git' on PATH. This should fail.
NIX=$(command -v nix)
# This should fail
(! PATH= $NIX eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath" )
@@ -141,3 +147,13 @@ NIX=$(command -v nix)
# Try again, with 'git' available. This should work.
path5=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath")
[[ $path3 = $path5 ]]
+
+# Fetching a shallow repo shouldn't work by default, because we can't
+# return a revCount.
+git clone --depth 1 file://$repo $TEST_ROOT/shallow
+(! nix eval --raw "(builtins.fetchGit { url = $TEST_ROOT/shallow; ref = \"dev\"; }).outPath")
+
+# But you can request a shallow clone, which won't return a revCount.
+path6=$(nix eval --raw "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).outPath")
+[[ $path3 = $path6 ]]
+[[ $(nix eval "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).revCount or 123") == 123 ]]
diff --git a/tests/fetchGitSubmodules.sh b/tests/fetchGitSubmodules.sh
new file mode 100644
index 000000000..4c2c13f1a
--- /dev/null
+++ b/tests/fetchGitSubmodules.sh
@@ -0,0 +1,97 @@
+source common.sh
+
+set -u
+
+if [[ -z $(type -p git) ]]; then
+ echo "Git not installed; skipping Git submodule tests"
+ exit 99
+fi
+
+clearStore
+
+rootRepo=$TEST_ROOT/gitSubmodulesRoot
+subRepo=$TEST_ROOT/gitSubmodulesSub
+
+rm -rf ${rootRepo} ${subRepo} $TEST_HOME/.cache/nix
+
+initGitRepo() {
+ git init $1
+ git -C $1 config user.email "foobar@example.com"
+ git -C $1 config user.name "Foobar"
+}
+
+addGitContent() {
+ echo "lorem ipsum" > $1/content
+ git -C $1 add content
+ git -C $1 commit -m "Initial commit"
+}
+
+initGitRepo $subRepo
+addGitContent $subRepo
+
+initGitRepo $rootRepo
+
+git -C $rootRepo submodule init
+git -C $rootRepo submodule add $subRepo sub
+git -C $rootRepo add sub
+git -C $rootRepo commit -m "Add submodule"
+
+rev=$(git -C $rootRepo rev-parse HEAD)
+
+r1=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; }).outPath")
+r2=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = false; }).outPath")
+r3=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = true; }).outPath")
+
+[[ $r1 == $r2 ]]
+[[ $r2 != $r3 ]]
+
+r4=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; }).outPath")
+r5=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = false; }).outPath")
+r6=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = true; }).outPath")
+r7=$(nix eval --raw "(builtins.fetchGit { url = $rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = true; }).outPath")
+r8=$(nix eval --raw "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; submodules = true; }).outPath")
+
+[[ $r1 == $r4 ]]
+[[ $r4 == $r5 ]]
+[[ $r3 == $r6 ]]
+[[ $r6 == $r7 ]]
+[[ $r7 == $r8 ]]
+
+have_submodules=$(nix eval "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; }).submodules")
+[[ $have_submodules == false ]]
+
+have_submodules=$(nix eval "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; submodules = false; }).submodules")
+[[ $have_submodules == false ]]
+
+have_submodules=$(nix eval "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; submodules = true; }).submodules")
+[[ $have_submodules == true ]]
+
+pathWithoutSubmodules=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; }).outPath")
+pathWithSubmodules=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = true; }).outPath")
+pathWithSubmodulesAgain=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = true; }).outPath")
+pathWithSubmodulesAgainWithRef=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = true; }).outPath")
+
+# The resulting store paths must not be the same.
+[[ $pathWithoutSubmodules != $pathWithSubmodules ]]
+
+# Checking out the same repo with submodules results in the same store path.
+[[ $pathWithSubmodules == $pathWithSubmodulesAgain ]]
+
+# Checking out the same repo with submodules and an explicit ref results in the same store path.
+[[ $pathWithSubmodulesAgain == $pathWithSubmodulesAgainWithRef ]]
+
+# The submodules flag is actually honored.
+[[ ! -e $pathWithoutSubmodules/sub/content ]]
+[[ -e $pathWithSubmodules/sub/content ]]
+
+[[ -e $pathWithSubmodulesAgainWithRef/sub/content ]]
+
+# No .git directories or submodule reference files may be left behind.
+test "$(find "$pathWithSubmodules" -name .git)" = ""
+
+# Git repos without submodules can be fetched with submodules = true.
+subRev=$(git -C $subRepo rev-parse HEAD)
+noSubmoduleRepoBaseline=$(nix eval --raw "(builtins.fetchGit { url = file://$subRepo; rev = \"$subRev\"; }).outPath")
+noSubmoduleRepo=$(nix eval --raw "(builtins.fetchGit { url = file://$subRepo; rev = \"$subRev\"; submodules = true; }).outPath")
+
+[[ $noSubmoduleRepoBaseline == $noSubmoduleRepo ]]
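Condensed, the behaviour this new test pins down is the following; a sketch with a placeholder repository URL, not part of the patch itself.

# Hypothetical standalone usage; file:///tmp/rootRepo is a placeholder.
# By default the submodule's files are absent from the result; with
# submodules = true they are checked out, so the two store paths differ.
withoutSub=$(nix eval --raw "(builtins.fetchGit { url = \"file:///tmp/rootRepo\"; }).outPath")
withSub=$(nix eval --raw "(builtins.fetchGit { url = \"file:///tmp/rootRepo\"; submodules = true; }).outPath")
[[ $withoutSub != $withSub ]]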
diff --git a/tests/init.sh b/tests/init.sh
index 6a119aad0..c62c4856a 100644
--- a/tests/init.sh
+++ b/tests/init.sh
@@ -17,7 +17,7 @@ cat > "$NIX_CONF_DIR"/nix.conf <<EOF
build-users-group =
keep-derivations = false
sandbox = false
-experimental-features = nix-command
+experimental-features = nix-command flakes
include nix.conf.extra
EOF
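Outside the test suite, the same opt-in can be expressed either as a nix.conf entry or per invocation; a sketch, assuming a user-level configuration file.

# In ~/.config/nix/nix.conf (or the system-wide nix.conf):
#   experimental-features = nix-command flakes
# ...or ad hoc, for a single command:
nix eval --experimental-features 'nix-command flakes' "(1 + 2)"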
diff --git a/tests/lang/eval-okay-getattrpos-functionargs.exp b/tests/lang/eval-okay-getattrpos-functionargs.exp
new file mode 100644
index 000000000..7f9ac40e8
--- /dev/null
+++ b/tests/lang/eval-okay-getattrpos-functionargs.exp
@@ -0,0 +1 @@
+{ column = 11; file = "eval-okay-getattrpos-functionargs.nix"; line = 2; }
diff --git a/tests/lang/eval-okay-getattrpos-functionargs.nix b/tests/lang/eval-okay-getattrpos-functionargs.nix
new file mode 100644
index 000000000..11d6bb0e3
--- /dev/null
+++ b/tests/lang/eval-okay-getattrpos-functionargs.nix
@@ -0,0 +1,4 @@
+let
+ fun = { foo }: {};
+ pos = builtins.unsafeGetAttrPos "foo" (builtins.functionArgs fun);
+in { inherit (pos) column line; file = baseNameOf pos.file; }
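The pair of files above amounts to the one-liner below; a sketch of running the same check by hand, where the reported file/line/column reflect how the expression is supplied rather than the values in the .exp file.

# unsafeGetAttrPos reports the position of the 'foo' formal argument of fun,
# even when the attrset comes from builtins.functionArgs.
nix-instantiate --eval --strict -E 'let fun = { foo }: {}; in builtins.unsafeGetAttrPos "foo" (builtins.functionArgs fun)'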
diff --git a/tests/linux-sandbox.sh b/tests/linux-sandbox.sh
index 52967d07d..16abd974c 100644
--- a/tests/linux-sandbox.sh
+++ b/tests/linux-sandbox.sh
@@ -28,3 +28,10 @@ nix cat-store $outPath/foobar | grep FOOBAR
# Test --check without hash rewriting.
nix-build dependencies.nix --no-out-link --check --sandbox-paths /nix/store
+
+# Test that sandboxed builds with --check and -K can move the .check directory to the store
+nix-build check.nix -A nondeterministic --sandbox-paths /nix/store --no-out-link
+
+(! nix-build check.nix -A nondeterministic --sandbox-paths /nix/store --no-out-link --check -K 2> $TEST_ROOT/log)
+if grep -q 'error: renaming' $TEST_ROOT/log; then false; fi
+grep -q 'may not be deterministic' $TEST_ROOT/log
diff --git a/tests/local.mk b/tests/local.mk
index dab3a23b6..56e5640ca 100644
--- a/tests/local.mk
+++ b/tests/local.mk
@@ -1,5 +1,6 @@
nix_tests = \
init.sh hash.sh lang.sh add.sh simple.sh dependencies.sh \
+ config.sh \
gc.sh \
gc-concurrent.sh \
gc-auto.sh \
@@ -17,9 +18,10 @@ nix_tests = \
nar-access.sh \
structured-attrs.sh \
fetchGit.sh \
+ fetchGitSubmodules.sh \
fetchMercurial.sh \
signing.sh \
- run.sh \
+ shell.sh \
brotli.sh \
pure-eval.sh \
check.sh \
diff --git a/tests/run.sh b/tests/run.sh
deleted file mode 100644
index d1dbfd6bd..000000000
--- a/tests/run.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-source common.sh
-
-clearStore
-clearCache
-
-nix run -f run.nix hello -c hello | grep 'Hello World'
-nix run -f run.nix hello -c hello NixOS | grep 'Hello NixOS'
-
-if ! canUseSandbox; then exit; fi
-
-chmod -R u+w $TEST_ROOT/store0 || true
-rm -rf $TEST_ROOT/store0
-
-clearStore
-
-path=$(nix eval --raw -f run.nix hello)
-
-# Note: we need the sandbox paths to ensure that the shell is
-# visible in the sandbox.
-nix run --sandbox-build-dir /build-tmp \
- --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' \
- --store $TEST_ROOT/store0 -f run.nix hello -c hello | grep 'Hello World'
-
-path2=$(nix run --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' --store $TEST_ROOT/store0 -f run.nix hello -c $SHELL -c 'type -p hello')
-
-[[ $path/bin/hello = $path2 ]]
-
-[[ -e $TEST_ROOT/store0/nix/store/$(basename $path)/bin/hello ]]
diff --git a/tests/run.nix b/tests/shell-hello.nix
index 77dcbd2a9..77dcbd2a9 100644
--- a/tests/run.nix
+++ b/tests/shell-hello.nix
diff --git a/tests/shell.sh b/tests/shell.sh
new file mode 100644
index 000000000..7a9ee8ab0
--- /dev/null
+++ b/tests/shell.sh
@@ -0,0 +1,28 @@
+source common.sh
+
+clearStore
+clearCache
+
+nix shell -f shell-hello.nix hello -c hello | grep 'Hello World'
+nix shell -f shell-hello.nix hello -c hello NixOS | grep 'Hello NixOS'
+
+if ! canUseSandbox; then exit; fi
+
+chmod -R u+w $TEST_ROOT/store0 || true
+rm -rf $TEST_ROOT/store0
+
+clearStore
+
+path=$(nix eval --raw -f shell-hello.nix hello)
+
+# Note: we need the sandbox paths to ensure that the shell is
+# visible in the sandbox.
+nix shell --sandbox-build-dir /build-tmp \
+ --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' \
+ --store $TEST_ROOT/store0 -f shell-hello.nix hello -c hello | grep 'Hello World'
+
+path2=$(nix shell --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' --store $TEST_ROOT/store0 -f shell-hello.nix hello -c $SHELL -c 'type -p hello')
+
+[[ $path/bin/hello = $path2 ]]
+
+[[ -e $TEST_ROOT/store0/nix/store/$(basename $path)/bin/hello ]]
diff --git a/tests/tarball.sh b/tests/tarball.sh
index 8adb8d72f..b3ec16d40 100644
--- a/tests/tarball.sh
+++ b/tests/tarball.sh
@@ -10,6 +10,8 @@ mkdir -p $tarroot
cp dependencies.nix $tarroot/default.nix
cp config.nix dependencies.builder*.sh $tarroot/
+hash=$(nix hash-path $tarroot)
+
test_tarball() {
local ext="$1"
local compressor="$2"
@@ -25,6 +27,11 @@ test_tarball() {
nix-build -o $TEST_ROOT/result -E "import (fetchTarball file://$tarball)"
+ nix-build --experimental-features flakes -o $TEST_ROOT/result -E "import (fetchTree file://$tarball)"
+ nix-build --experimental-features flakes -o $TEST_ROOT/result -E "import (fetchTree { type = \"tarball\"; url = file://$tarball; })"
+ nix-build --experimental-features flakes -o $TEST_ROOT/result -E "import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"$hash\"; })"
+ nix-build --experimental-features flakes -o $TEST_ROOT/result -E "import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"sha256-xdKv2pq/IiwLSnBBJXW8hNowI4MrdZfW+SYqDQs7Tzc=\"; })" 2>&1 | grep 'NAR hash mismatch in input'
+
nix-instantiate --eval -E '1 + 2' -I fnord=file://no-such-tarball.tar$ext
nix-instantiate --eval -E 'with <fnord/xyzzy>; 1 + 2' -I fnord=file://no-such-tarball$ext
(! nix-instantiate --eval -E '<fnord/xyzzy> 1' -I fnord=file://no-such-tarball$ext)