-rw-r--r--  .github/ISSUE_TEMPLATE.md | 4
-rw-r--r--  .gitignore | 1
-rw-r--r--  Makefile | 3
-rw-r--r--  configure.ac | 3
-rw-r--r--  corepkgs/fetchurl.nix | 6
-rw-r--r--  doc/manual/command-ref/conf-file.xml | 51
-rw-r--r--  doc/manual/command-ref/env-common.xml | 2
-rw-r--r--  doc/manual/command-ref/nix-build.xml | 21
-rw-r--r--  doc/manual/command-ref/opt-common-syn.xml | 1
-rw-r--r--  doc/manual/command-ref/opt-common.xml | 7
-rw-r--r--  doc/manual/expressions/builtins.xml | 74
-rw-r--r--  doc/manual/expressions/debug-build.xml | 34
-rw-r--r--  doc/manual/expressions/language-constructs.xml | 32
-rw-r--r--  doc/manual/expressions/simple-building-testing.xml | 2
-rw-r--r--  doc/manual/installation/installing-binary.xml | 10
-rw-r--r--  doc/manual/installation/supported-platforms.xml | 3
-rw-r--r--  doc/manual/introduction/quick-start.xml | 2
-rw-r--r--  doc/manual/manual.xml | 6
-rw-r--r--  doc/manual/release-notes/release-notes.xml | 2
-rw-r--r--  doc/manual/release-notes/rl-2.0.xml (renamed from doc/manual/release-notes/rl-1.12.xml) | 26
-rw-r--r--  doc/manual/troubleshooting/collisions-nixenv.xml | 38
-rw-r--r--  doc/manual/troubleshooting/links-nix-store.xml | 43
-rw-r--r--  doc/manual/troubleshooting/troubleshooting.xml | 16
-rw-r--r--  mk/libraries.mk | 7
-rw-r--r--  mk/tests.mk | 2
-rw-r--r--  release-common.nix | 6
-rw-r--r--  release.nix | 19
-rw-r--r--  scripts/install-darwin-multi-user.sh | 6
-rw-r--r--  scripts/nix-profile.sh.in | 2
-rw-r--r--  src/build-remote/build-remote.cc | 10
-rw-r--r--  src/libexpr/eval.cc | 103
-rw-r--r--  src/libexpr/eval.hh | 19
-rw-r--r--  src/libexpr/primops.cc | 181
-rw-r--r--  src/libexpr/primops.hh | 3
-rw-r--r--  src/libexpr/primops/fetchGit.cc | 16
-rw-r--r--  src/libexpr/primops/fetchMercurial.cc | 6
-rw-r--r--  src/libmain/common-args.cc | 4
-rw-r--r--  src/libmain/shared.cc | 3
-rw-r--r--  src/libmain/shared.hh | 1
-rw-r--r--  src/libstore/binary-cache-store.cc | 2
-rw-r--r--  src/libstore/binary-cache-store.hh | 2
-rw-r--r--  src/libstore/build.cc | 127
-rw-r--r--  src/libstore/download.cc | 42
-rw-r--r--  src/libstore/download.hh | 2
-rw-r--r--  src/libstore/gc.cc | 4
-rw-r--r--  src/libstore/globals.cc | 15
-rw-r--r--  src/libstore/globals.hh | 25
-rw-r--r--  src/libstore/http-binary-cache-store.cc | 11
-rw-r--r--  src/libstore/local-store.cc | 3
-rw-r--r--  src/libstore/local-store.hh | 5
-rw-r--r--  src/libstore/local.mk | 3
-rw-r--r--  src/libstore/pathlocks.cc | 6
-rw-r--r--  src/libstore/pathlocks.hh | 6
-rw-r--r--  src/libstore/remote-store.cc | 26
-rw-r--r--  src/libstore/remote-store.hh | 2
-rw-r--r--  src/libstore/store-api.cc | 11
-rw-r--r--  src/libstore/store-api.hh | 23
-rw-r--r--  src/libutil/compression.cc | 32
-rw-r--r--  src/libutil/compression.hh | 4
-rw-r--r--  src/libutil/logging.cc | 2
-rw-r--r--  src/libutil/util.cc | 71
-rw-r--r--  src/libutil/util.hh | 16
-rwxr-xr-x  src/nix-build/nix-build.cc | 11
-rwxr-xr-x  src/nix-channel/nix-channel.cc | 3
-rw-r--r--  src/nix-collect-garbage/nix-collect-garbage.cc | 2
-rwxr-xr-x  src/nix-copy-closure/nix-copy-closure.cc | 2
-rw-r--r--  src/nix-daemon/nix-daemon.cc | 6
-rw-r--r--  src/nix-env/nix-env.cc | 2
-rw-r--r--  src/nix-instantiate/nix-instantiate.cc | 4
-rw-r--r--  src/nix-prefetch-url/nix-prefetch-url.cc | 2
-rw-r--r--  src/nix-store/nix-store.cc | 3
-rw-r--r--  src/nix/build.cc | 2
-rw-r--r--  src/nix/command.hh | 4
-rw-r--r--  src/nix/eval.cc | 28
-rw-r--r--  src/nix/installables.cc | 10
-rw-r--r--  src/nix/local.mk | 2
-rw-r--r--  src/nix/log.cc | 1
-rw-r--r--  src/nix/main.cc | 7
-rw-r--r--  src/nix/progress-bar.cc | 93
-rw-r--r--  src/nix/upgrade-nix.cc | 131
-rw-r--r--  src/nlohmann/json.hpp | 1713
-rw-r--r--  tests/build-remote.sh | 2
-rw-r--r--  tests/check.nix | 17
-rw-r--r--  tests/check.sh | 32
-rw-r--r--  tests/common.sh.in | 19
-rw-r--r--  tests/fetchGit.sh | 20
-rw-r--r--  tests/fetchMercurial.sh | 7
-rw-r--r--  tests/fixed.sh | 13
-rw-r--r--  tests/lang/data | 1
-rw-r--r--  tests/lang/eval-okay-path.nix | 7
-rw-r--r--  tests/linux-sandbox.sh | 2
-rw-r--r--  tests/local.mk | 7
-rw-r--r--  tests/misc.sh | 2
-rw-r--r--  tests/nix-copy-closure.nix | 8
-rw-r--r--  tests/plugins.sh | 7
-rw-r--r--  tests/plugins/local.mk | 9
-rw-r--r--  tests/plugins/plugintest.cc | 10
-rw-r--r--  tests/pure-eval.nix | 3
-rw-r--r--  tests/pure-eval.sh | 18
-rw-r--r--  tests/remote-builds.nix | 10
-rw-r--r--  tests/restricted.nix | 1
-rw-r--r--  tests/restricted.sh | 12
-rw-r--r--  tests/run.sh | 27
-rwxr-xr-x  tests/shell.shebang.sh | 2
-rw-r--r--  tests/user-envs.sh | 3
-rw-r--r--  version | 2
106 files changed, 2147 insertions, 1335 deletions
diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
index cdf642e6a..3372b1f03 100644
--- a/.github/ISSUE_TEMPLATE.md
+++ b/.github/ISSUE_TEMPLATE.md
@@ -14,7 +14,7 @@ Examples of _Nix_ issues:
- Nix segfaults when I run `nix-build -A blahblah`
- The Nix language needs a new builtin: `builtins.foobar`
-- Regression in the behavior of `nix-env` in Nix 1.12
+- Regression in the behavior of `nix-env` in Nix 2.0
Examples of _nixpkgs_ issues:
@@ -24,4 +24,4 @@ Examples of _nixpkgs_ issues:
Chances are if you're a newcomer to the Nix world, you'll probably want the [nixpkgs tracker](https://github.com/NixOS/nixpkgs/issues). It also gets a lot more eyeball traffic so you'll probably get a response a lot more quickly.
--->
\ No newline at end of file
+-->
diff --git a/.gitignore b/.gitignore
index ce22fa007..0a9599378 100644
--- a/.gitignore
+++ b/.gitignore
@@ -38,6 +38,7 @@ perl/Makefile.config
/scripts/nix-copy-closure
/scripts/nix-reduce-build
/scripts/nix-http-export.cgi
+/scripts/nix-profile-daemon.sh
# /src/libexpr/
/src/libexpr/lexer-tab.cc
diff --git a/Makefile b/Makefile
index 5d8e990cc..c867823fc 100644
--- a/Makefile
+++ b/Makefile
@@ -24,7 +24,8 @@ makefiles = \
misc/launchd/local.mk \
misc/upstart/local.mk \
doc/manual/local.mk \
- tests/local.mk
+ tests/local.mk \
+ tests/plugins/local.mk
GLOBAL_CXXFLAGS += -std=c++14 -g -Wall -include config.h
diff --git a/configure.ac b/configure.ac
index 9db92ce91..83b2346d0 100644
--- a/configure.ac
+++ b/configure.ac
@@ -61,6 +61,7 @@ CFLAGS=
CXXFLAGS=
AC_PROG_CC
AC_PROG_CXX
+AC_PROG_CPP
AX_CXX_COMPILE_STDCXX_11
@@ -199,7 +200,7 @@ AC_SUBST(ENABLE_S3, [$enable_s3])
AC_LANG_POP(C++)
if test -n "$enable_s3"; then
- declare -a aws_version_tokens=($(printf '#include <aws/core/VersionConfig.h>\nAWS_SDK_VERSION_STRING' | cpp -E | grep -v '^#.*' | sed 's/"//g' | tr '.' ' '))
+ declare -a aws_version_tokens=($(printf '#include <aws/core/VersionConfig.h>\nAWS_SDK_VERSION_STRING' | $CPP - | grep -v '^#.*' | sed 's/"//g' | tr '.' ' '))
AC_DEFINE_UNQUOTED([AWS_VERSION_MAJOR], ${aws_version_tokens@<:@0@:>@}, [Major version of aws-sdk-cpp.])
AC_DEFINE_UNQUOTED([AWS_VERSION_MINOR], ${aws_version_tokens@<:@1@:>@}, [Minor version of aws-sdk-cpp.])
fi
diff --git a/corepkgs/fetchurl.nix b/corepkgs/fetchurl.nix
index e135b947f..0ce1bab11 100644
--- a/corepkgs/fetchurl.nix
+++ b/corepkgs/fetchurl.nix
@@ -1,4 +1,4 @@
-{ system ? builtins.currentSystem
+{ system ? "" # obsolete
, url
, md5 ? "", sha1 ? "", sha256 ? "", sha512 ? ""
, outputHash ?
@@ -17,7 +17,9 @@ derivation {
inherit outputHashAlgo outputHash;
outputHashMode = if unpack || executable then "recursive" else "flat";
- inherit name system url executable unpack;
+ inherit name url executable unpack;
+
+ system = "builtin";
# No need to double the amount of network traffic
preferLocalBuild = true;
diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml
index e52cbcd53..5c4561f66 100644
--- a/doc/manual/command-ref/conf-file.xml
+++ b/doc/manual/command-ref/conf-file.xml
@@ -312,7 +312,7 @@ false</literal>.</para>
</varlistentry>
- <varlistentry><term><literal>use-substitutes</literal></term>
+ <varlistentry><term><literal>substitute</literal></term>
<listitem><para>If set to <literal>true</literal> (default), Nix
will use binary substitutes if available. This option can be
@@ -321,6 +321,20 @@ false</literal>.</para>
</varlistentry>
+ <varlistentry><term><literal>builders-use-substitutes</literal></term>
+
+ <listitem><para>If set to <literal>true</literal>, Nix will instruct
+ remote build machines to use their own binary substitutes if available. In
+ practical terms, this means that remote hosts will fetch as many build
+ dependencies as possible from their own substitutes (e.g., from
+ <literal>cache.nixos.org</literal>), instead of waiting for this host to
+ upload them all. This can drastically reduce build times if the network
+ connection between this computer and the remote build host is slow. Defaults
+ to <literal>false</literal>.</para></listitem>
+
+ </varlistentry>
+
+
<varlistentry><term><literal>fallback</literal></term>
<listitem><para>If set to <literal>true</literal>, Nix will fall
@@ -720,6 +734,41 @@ builtins.fetchurl {
</varlistentry>
+ <varlistentry xml:id="conf-show-trace"><term><literal>show-trace</literal></term>
+
+ <listitem><para>Causes Nix to print out a stack trace in case of Nix
+ expression evaluation errors.</para></listitem>
+
+ </varlistentry>
+
+
+ <varlistentry xml:id="conf-plugin-files">
+ <term><literal>plugin-files</literal></term>
+ <listitem>
+ <para>
+ A list of plugin files to be loaded by Nix. Each of these
+ files will be dlopened by Nix, allowing them to affect
+ execution through static initialization. In particular, these
+ plugins may construct static instances of RegisterPrimOp to
+ add new primops or constants to the expression language,
+ RegisterStoreImplementation to add new store implementations,
+ and RegisterCommand to add new subcommands to the
+ <literal>nix</literal> command. See the constructors for those
+ types for more details.
+ </para>
+ <para>
+ Since these files are loaded into the same address space as
+ Nix itself, they must be DSOs compatible with the instance of
+ Nix running at the time (i.e. compiled against the same
+ headers, not linked to any incompatible libraries). They
+ should not be linked to any Nix libs directly, as those will
+ be available already at load time.
+ </para>
+ </listitem>
+
+ </varlistentry>
+
+
</variablelist>
</para>
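
The settings documented in the hunk above (substitute, builders-use-substitutes, show-trace, plugin-files) are ordinary nix.conf options. A minimal sketch of enabling the new ones, assuming a NixOS-style configuration that appends raw lines to nix.conf via nix.extraOptions; the plugin path is a purely hypothetical placeholder, and the same lines can be written verbatim into /etc/nix/nix.conf:

    {
      nix.extraOptions = ''
        # Let remote build machines fetch dependencies from their own substitutes.
        builders-use-substitutes = true
        # Print an evaluation stack trace on Nix expression errors.
        show-trace = true
        # Hypothetical plugin DSO, built against the same Nix headers as this Nix.
        plugin-files = /path/to/libplugintest.so
      '';
    }
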
diff --git a/doc/manual/command-ref/env-common.xml b/doc/manual/command-ref/env-common.xml
index a83aeaf2e..361d3e2b0 100644
--- a/doc/manual/command-ref/env-common.xml
+++ b/doc/manual/command-ref/env-common.xml
@@ -154,6 +154,8 @@ $ mount -o bind /mnt/otherdisk/nix /nix</screen>
<literal>daemon</literal> if you want to use the Nix daemon to
execute Nix operations. This is necessary in <link
linkend="ssec-multi-user">multi-user Nix installations</link>.
+ If the Nix daemon's Unix socket is at some non-standard path,
+ this variable should be set to <literal>unix://path/to/socket</literal>.
Otherwise, it should be left unset.</para></listitem>
</varlistentry>
diff --git a/doc/manual/command-ref/nix-build.xml b/doc/manual/command-ref/nix-build.xml
index d6b2e5e5a..40fe7a43f 100644
--- a/doc/manual/command-ref/nix-build.xml
+++ b/doc/manual/command-ref/nix-build.xml
@@ -29,8 +29,6 @@
</group>
<replaceable>attrPath</replaceable>
</arg>
- <arg><option>--drv-link</option> <replaceable>drvlink</replaceable></arg>
- <arg><option>--add-drv-link</option></arg>
<arg><option>--no-out-link</option></arg>
<arg>
<group choice='req'>
@@ -91,25 +89,6 @@ also <xref linkend="sec-common-options" />.</phrase></para>
<variablelist>
- <varlistentry><term><option>--drv-link</option> <replaceable>drvlink</replaceable></term>
-
- <listitem><para>Add a symlink named
- <replaceable>drvlink</replaceable> to the store derivation
- produced by <command>nix-instantiate</command>. The derivation is
- a root of the garbage collector until the symlink is deleted or
- renamed. If there are multiple derivations, numbers are suffixed
- to <replaceable>drvlink</replaceable> to distinguish between
- them.</para></listitem>
-
- </varlistentry>
-
- <varlistentry><term><option>--add-drv-link</option></term>
-
- <listitem><para>Shorthand for <option>--drv-link</option>
- <filename>./derivation</filename>.</para></listitem>
-
- </varlistentry>
-
<varlistentry><term><option>--no-out-link</option></term>
<listitem><para>Do not create a symlink to the output path. Note
diff --git a/doc/manual/command-ref/opt-common-syn.xml b/doc/manual/command-ref/opt-common-syn.xml
index 3aff4e1b6..168bef080 100644
--- a/doc/manual/command-ref/opt-common-syn.xml
+++ b/doc/manual/command-ref/opt-common-syn.xml
@@ -47,7 +47,6 @@
</arg>
<arg><option>--fallback</option></arg>
<arg><option>--readonly-mode</option></arg>
-<arg><option>--show-trace</option></arg>
<arg>
<option>-I</option>
<replaceable>path</replaceable>
diff --git a/doc/manual/command-ref/opt-common.xml b/doc/manual/command-ref/opt-common.xml
index 32d53c753..bcb60b301 100644
--- a/doc/manual/command-ref/opt-common.xml
+++ b/doc/manual/command-ref/opt-common.xml
@@ -301,13 +301,6 @@
</varlistentry>
-<varlistentry><term><option>--show-trace</option></term>
-
- <listitem><para>Causes Nix to print out a stack trace in case of Nix
- expression evaluation errors.</para></listitem>
-
-</varlistentry>
-
<varlistentry xml:id="opt-I"><term><option>-I</option> <replaceable>path</replaceable></term>
diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml
index 5a3a8645c..81770bcf6 100644
--- a/doc/manual/expressions/builtins.xml
+++ b/doc/manual/expressions/builtins.xml
@@ -308,8 +308,9 @@ stdenv.mkDerivation { … }
</varlistentry>
- <varlistentry><term><function>builtins.filterSource</function>
- <replaceable>e1</replaceable> <replaceable>e2</replaceable></term>
+ <varlistentry xml:id='builtin-filterSource'>
+ <term><function>builtins.filterSource</function>
+ <replaceable>e1</replaceable> <replaceable>e2</replaceable></term>
<listitem>
@@ -768,6 +769,75 @@ Evaluates to <literal>[ "foo" ]</literal>.
</varlistentry>
+ <varlistentry>
+ <term>
+ <function>builtins.path</function>
+ <replaceable>args</replaceable>
+ </term>
+
+ <listitem>
+ <para>
+ An enrichment of the built-in path type, based on the attributes
+ present in <replaceable>args</replaceable>. All are optional
+ except <varname>path</varname>:
+ </para>
+
+ <variablelist>
+ <varlistentry>
+ <term>path</term>
+ <listitem>
+ <para>The underlying path.</para>
+ </listitem>
+ </varlistentry>
+ <varlistentry>
+ <term>name</term>
+ <listitem>
+ <para>
+            The name of the path when added to the store. This can be
+            used to reference paths that have nix-illegal characters
+ in their names, like <literal>@</literal>.
+ </para>
+ </listitem>
+ </varlistentry>
+ <varlistentry>
+ <term>filter</term>
+ <listitem>
+ <para>
+ A function of the type expected by
+ <link linkend="builtin-filterSource">builtins.filterSource</link>,
+ with the same semantics.
+ </para>
+ </listitem>
+ </varlistentry>
+ <varlistentry>
+ <term>recursive</term>
+ <listitem>
+ <para>
+ When <literal>false</literal>, when
+ <varname>path</varname> is added to the store it is with a
+ flat hash, rather than a hash of the NAR serialization of
+ the file. Thus, <varname>path</varname> must refer to a
+ regular file, not a directory. This allows similar
+ behavior to <literal>fetchurl</literal>. Defaults to
+ <literal>true</literal>.
+ </para>
+ </listitem>
+ </varlistentry>
+ <varlistentry>
+ <term>sha256</term>
+ <listitem>
+ <para>
+ When provided, this is the expected hash of the file at
+ the path. Evaluation will fail if the hash is incorrect,
+ and providing a hash allows
+ <literal>builtins.path</literal> to be used even when the
+ <literal>pure-eval</literal> nix config option is on.
+ </para>
+ </listitem>
+ </varlistentry>
+ </variablelist>
+ </listitem>
+ </varlistentry>
<varlistentry><term><function>builtins.pathExists</function>
<replaceable>path</replaceable></term>
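
To make the builtins.path attributes described above concrete, a small usage sketch; the source path, name, filter and hash are placeholders rather than values taken from this change:

    builtins.path {
      # The underlying path; the only required attribute. Concatenation is
      # used here because '@' is not allowed in a plain path literal.
      path = ./. + "/foo@2.0";
      # Clean store name for a path whose on-disk name contains characters
      # such as '@' that cannot appear in a store path name.
      name = "foo-2.0";
      # Same calling convention as builtins.filterSource: path and type.
      filter = path: type: type != "directory" || baseNameOf path != ".git";
      # Hash the NAR serialization (the default); set to false for a flat,
      # fetchurl-like hash of a single regular file.
      recursive = true;
      # Optionally pin the expected hash; with a correct sha256 the call is
      # also allowed under the pure-eval option.
      # sha256 = "...";
    }
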
diff --git a/doc/manual/expressions/debug-build.xml b/doc/manual/expressions/debug-build.xml
deleted file mode 100644
index 0c1f4e671..000000000
--- a/doc/manual/expressions/debug-build.xml
+++ /dev/null
@@ -1,34 +0,0 @@
-<section xmlns="http://docbook.org/ns/docbook"
- xmlns:xlink="http://www.w3.org/1999/xlink"
- xmlns:xi="http://www.w3.org/2001/XInclude"
- version="5.0"
- xml:id="sec-debug-build">
-
-<title>Debugging Build Failures</title>
-
-<para>At the beginning of each phase of the build (such as unpacking,
-building or installing), the set of all shell variables is written to
-the file <filename>env-vars</filename> at the top-level build
-directory. This is useful for debugging: it allows you to recreate
-the environment in which a build was performed. For instance, if a
-build fails, then assuming you used the <option>-K</option> flag, you
-can go to the output directory and <quote>switch</quote> to the
-environment of the builder:
-
-<screen>
-$ nix-build -K ./foo.nix
-... fails, keeping build directory `/tmp/nix-1234-0'
-
-$ cd /tmp/nix-1234-0
-
-$ source env-vars
-
-<lineannotation>(edit some files...)</lineannotation>
-
-$ make
-
-<lineannotation>(execution continues with the same GCC, make, etc.)</lineannotation></screen>
-
-</para>
-
-</section>
diff --git a/doc/manual/expressions/language-constructs.xml b/doc/manual/expressions/language-constructs.xml
index 2f0027d47..47d95f8a1 100644
--- a/doc/manual/expressions/language-constructs.xml
+++ b/doc/manual/expressions/language-constructs.xml
@@ -61,7 +61,7 @@ evaluates to <literal>"foobar"</literal>.
<simplesect><title>Inheriting attributes</title>
-<para>When defining a set it is often convenient to copy variables
+<para>When defining a set or in a let-expression it is often convenient to copy variables
from the surrounding lexical scope (e.g., when you want to propagate
attributes). This can be shortened using the
<literal>inherit</literal> keyword. For instance,
@@ -72,7 +72,15 @@ let x = 123; in
y = 456;
}</programlisting>
-evaluates to <literal>{ x = 123; y = 456; }</literal>. (Note that
+is equivalent to
+
+<programlisting>
+let x = 123; in
+{ x = x;
+ y = 456;
+}</programlisting>
+
+and both evaluate to <literal>{ x = 123; y = 456; }</literal>. (Note that
this works because <varname>x</varname> is added to the lexical scope
by the <literal>let</literal> construct.) It is also possible to
inherit attributes from another set. For instance, in this fragment
@@ -101,6 +109,26 @@ variables from the surrounding scope (<varname>fetchurl</varname>
<varname>libXaw</varname> (the X Athena Widgets) from the
<varname>xlibs</varname> (X11 client-side libraries) set.</para>
+<para>
+Summarizing the fragment
+
+<programlisting>
+...
+inherit x y z;
+inherit (src-set) a b c;
+...</programlisting>
+
+is equivalent to
+
+<programlisting>
+...
+x = x; y = y; z = z;
+a = src-set.a; b = src-set.b; c = src-set.c;
+...</programlisting>
+
+when used while defining local variables in a let-expression or
+while defining a set.</para>
+
</simplesect>
diff --git a/doc/manual/expressions/simple-building-testing.xml b/doc/manual/expressions/simple-building-testing.xml
index bd3901a13..0348c082b 100644
--- a/doc/manual/expressions/simple-building-testing.xml
+++ b/doc/manual/expressions/simple-building-testing.xml
@@ -81,6 +81,4 @@ Just pass the option <link linkend='opt-max-jobs'><option>-j
in parallel, or set. Typically this should be the number of
CPUs.</para>
-<xi:include href="debug-build.xml" />
-
</section>
diff --git a/doc/manual/installation/installing-binary.xml b/doc/manual/installation/installing-binary.xml
index 24e76eafe..7e8dfb0db 100644
--- a/doc/manual/installation/installing-binary.xml
+++ b/doc/manual/installation/installing-binary.xml
@@ -79,16 +79,6 @@ alice$ ./install
</para>
-<para>Nix can be uninstalled using <command>rpm -e nix</command> or
-<command>dpkg -r nix</command> on RPM- and Dpkg-based systems,
-respectively. After this you should manually remove the Nix store and
-other auxiliary data, if desired:
-
-<screen>
-$ rm -rf /nix</screen>
-
-</para>
-
<para>You can uninstall Nix simply by running:
<screen>
diff --git a/doc/manual/installation/supported-platforms.xml b/doc/manual/installation/supported-platforms.xml
index a468a5640..6858573ff 100644
--- a/doc/manual/installation/supported-platforms.xml
+++ b/doc/manual/installation/supported-platforms.xml
@@ -33,7 +33,4 @@
</para>
-<para>Nix is fairly portable, so it should work on most platforms that
-support POSIX threads and have a C++11 compiler.</para>
-
</chapter>
diff --git a/doc/manual/introduction/quick-start.xml b/doc/manual/introduction/quick-start.xml
index aa239b753..1ce6c8d50 100644
--- a/doc/manual/introduction/quick-start.xml
+++ b/doc/manual/introduction/quick-start.xml
@@ -15,7 +15,7 @@ to subsequent chapters.</para>
<step><para>Install single-user Nix by running the following:
<screen>
-$ curl https://nixos.org/nix/install | sh
+$ bash &lt;(curl https://nixos.org/nix/install)
</screen>
This will install Nix in <filename>/nix</filename>. The install script
diff --git a/doc/manual/manual.xml b/doc/manual/manual.xml
index 61205d916..b5a6af7d0 100644
--- a/doc/manual/manual.xml
+++ b/doc/manual/manual.xml
@@ -12,14 +12,11 @@
<firstname>Eelco</firstname>
<surname>Dolstra</surname>
</personname>
- <affiliation>
- <orgname>LogicBlox</orgname>
- </affiliation>
<contrib>Author</contrib>
</author>
<copyright>
- <year>2004-2014</year>
+ <year>2004-2017</year>
<holder>Eelco Dolstra</holder>
</copyright>
@@ -41,7 +38,6 @@
<xi:include href="expressions/writing-nix-expressions.xml" />
<xi:include href="advanced-topics/advanced-topics.xml" />
<xi:include href="command-ref/command-ref.xml" />
- <xi:include href="troubleshooting/troubleshooting.xml" />
<xi:include href="glossary/glossary.xml" />
<xi:include href="hacking.xml" />
<xi:include href="release-notes/release-notes.xml" />
diff --git a/doc/manual/release-notes/release-notes.xml b/doc/manual/release-notes/release-notes.xml
index c4b14bc54..b8392a647 100644
--- a/doc/manual/release-notes/release-notes.xml
+++ b/doc/manual/release-notes/release-notes.xml
@@ -12,7 +12,7 @@
</partintro>
-->
-<xi:include href="rl-1.12.xml" />
+<xi:include href="rl-2.0.xml" />
<xi:include href="rl-1.11.10.xml" />
<xi:include href="rl-1.11.xml" />
<xi:include href="rl-1.10.xml" />
diff --git a/doc/manual/release-notes/rl-1.12.xml b/doc/manual/release-notes/rl-2.0.xml
index 29943e3e6..effd2e39d 100644
--- a/doc/manual/release-notes/rl-1.12.xml
+++ b/doc/manual/release-notes/rl-2.0.xml
@@ -2,9 +2,9 @@
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xi="http://www.w3.org/2001/XInclude"
version="5.0"
- xml:id="ssec-relnotes-1.12">
+ xml:id="ssec-relnotes-2.0">
-<title>Release 1.12 (TBA)</title>
+<title>Release 2.0 (2018-02-??)</title>
<para>This release has the following new features:</para>
@@ -79,6 +79,11 @@
<listitem><para><command>nix add-to-store</command> (970366266b8df712f5f9cedb45af183ef5a8357f).</para></listitem>
+ <listitem><para><command>nix upgrade-nix</command> upgrades Nix
+ to the latest stable version. This requires that Nix is
+ installed in a profile. (Thus it won’t work on NixOS, or if it’s
+ installed outside of the Nix store.)</para></listitem>
+
<listitem><para>Progress indicator.</para></listitem>
<listitem><para>All options are available as flags now
@@ -94,11 +99,11 @@
</listitem>
<listitem>
- <para>New build mode <command>nix-build --hash</command> that
- builds a derivation, computes the hash of the output, and moves
- the output to the store path corresponding to what a fixed-output
- derivation with that hash would produce.
- (Add docs and examples; see d367b8e7875161e655deaa96bf8a5dd0bcf8229e)</para>
+ <para>If a fixed-output derivation produces a result with an
+ incorrect hash, the output path will be moved to the location
+ corresponding to the actual hash and registered as valid. Thus, a
+ subsequent build of the fixed-output derivation with the correct
+ hash is unnecessary.</para>
</listitem>
<listitem>
@@ -384,6 +389,13 @@ configureFlags = "--prefix=${placeholder "out"} --includedir=${placeholder "dev"
</para>
</listitem>
+ <listitem>
+ <para>
+ Nix can now be extended with plugins. See the documentation of
+ the 'plugin-files' option for more details.
+ </para>
+ </listitem>
+
</itemizedlist>
<para>Some features were removed:</para>
diff --git a/doc/manual/troubleshooting/collisions-nixenv.xml b/doc/manual/troubleshooting/collisions-nixenv.xml
deleted file mode 100644
index 23cc43faf..000000000
--- a/doc/manual/troubleshooting/collisions-nixenv.xml
+++ /dev/null
@@ -1,38 +0,0 @@
-<section xmlns="http://docbook.org/ns/docbook"
- xmlns:xlink="http://www.w3.org/1999/xlink"
- xmlns:xi="http://www.w3.org/2001/XInclude"
- version="5.0"
- xml:id="sec-collisions-nixenv">
-
-<title>Collisions in <command>nix-env</command></title>
-
-<para>Symptom: when installing or upgrading, you get an error message such as
-
-<screen>
-$ nix-env -i docbook-xml
-...
-adding /nix/store/s5hyxgm62gk2...-docbook-xml-4.2
-collision between `/nix/store/s5hyxgm62gk2...-docbook-xml-4.2/xml/dtd/docbook/calstblx.dtd'
- and `/nix/store/06h377hr4b33...-docbook-xml-4.3/xml/dtd/docbook/calstblx.dtd'
- at /nix/store/...-builder.pl line 62.</screen>
-
-</para>
-
-<para>The cause is that two installed packages in the user environment
-have overlapping filenames (e.g.,
-<filename>xml/dtd/docbook/calstblx.dtd</filename>. This usually
-happens when you accidentally try to install two versions of the same
-package. For instance, in the example above, the Nix Packages
-collection contains two versions of <literal>docbook-xml</literal>, so
-<command>nix-env -i</command> will try to install both. The default
-user environment builder has no way to way to resolve such conflicts,
-so it just gives up.</para>
-
-<para>Solution: remove one of the offending packages from the user
-environment (if already installed) using <command>nix-env
--e</command>, or specify exactly which version should be installed
-(e.g., <literal>nix-env -i docbook-xml-4.2</literal>).</para>
-
-<!-- FIXME: describe priorities -->
-
-</section>
diff --git a/doc/manual/troubleshooting/links-nix-store.xml b/doc/manual/troubleshooting/links-nix-store.xml
deleted file mode 100644
index c76888956..000000000
--- a/doc/manual/troubleshooting/links-nix-store.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<section xmlns="http://docbook.org/ns/docbook"
- xmlns:xlink="http://www.w3.org/1999/xlink"
- xmlns:xi="http://www.w3.org/2001/XInclude"
- version="5.0"
- xml:id="sec-links-nix-store">
-
-<title><quote>Too many links</quote> Error in the Nix store</title>
-
-
-<para>Symptom: when building something, you get an error message such as
-
-<screen>
-...
-<literal>mkdir: cannot create directory `/nix/store/<replaceable>name</replaceable>': Too many links</literal></screen>
-
-</para>
-
-<para>This is usually because you have more than 32,000 subdirectories
-in <filename>/nix/store</filename>, as can be seen using <command>ls
--l</command>:
-
-<screen>
-$ ls -ld /nix/store
-drwxrwxrwt 32000 nix nix 4620288 Sep 8 15:08 store</screen>
-
-The <literal>ext2</literal> file system is limited to an inode link
-count of 32,000 (each subdirectory increasing the count by one).
-Furthermore, the <literal>st_nlink</literal> field of the
-<function>stat</function> system call is a 16-bit value.</para>
-
-<para>This only happens on very large Nix installations (such as build
-machines).</para>
-
-<para>Quick solution: run the garbage collector. You may want to use
-the <option>--max-links</option> option.</para>
-
-<para>Real solution: put the Nix store on a file system that supports
-more than 32,000 subdirectories per directory, such as ext4. (This
-doesn’t solve the <literal>st_nlink</literal> limit, but ext4 lies to
-the kernel by reporting a link count of 1 if it exceeds the
-limit.)</para>
-
-</section>
diff --git a/doc/manual/troubleshooting/troubleshooting.xml b/doc/manual/troubleshooting/troubleshooting.xml
deleted file mode 100644
index 1e973a192..000000000
--- a/doc/manual/troubleshooting/troubleshooting.xml
+++ /dev/null
@@ -1,16 +0,0 @@
-<appendix xmlns="http://docbook.org/ns/docbook"
- xmlns:xlink="http://www.w3.org/1999/xlink"
- xmlns:xi="http://www.w3.org/2001/XInclude"
- version="5.0"
- xml:id="ch-troubleshooting">
-
-<title>Troubleshooting</title>
-
-<para>This section provides solutions for some common problems. See
-the <link xlink:href="https://github.com/NixOS/nix/issues">Nix bug
-tracker</link> for a list of currently known issues.</para>
-
-<xi:include href="collisions-nixenv.xml" />
-<xi:include href="links-nix-store.xml" />
-
-</appendix>
diff --git a/mk/libraries.mk b/mk/libraries.mk
index 3cd7a5310..14c95fa91 100644
--- a/mk/libraries.mk
+++ b/mk/libraries.mk
@@ -45,6 +45,11 @@ endif
# - $(1)_INSTALL_DIR: the directory where the library will be
# installed. Defaults to $(libdir).
#
+# - $(1)_EXCLUDE_FROM_LIBRARY_LIST: if defined, the library will not
+# be automatically marked as a dependency of the top-level all
+# target and will not be listed in the make help output. This is
+# useful for libraries built solely for testing, for example.
+#
# - BUILD_SHARED_LIBS: if equal to ‘1’, a dynamic library will be
# built, otherwise a static library.
define build-library
@@ -149,7 +154,9 @@ define build-library
$(1)_DEPS := $$(foreach fn, $$($(1)_OBJS), $$(call filename-to-dep, $$(fn)))
-include $$($(1)_DEPS)
+ ifndef $(1)_EXCLUDE_FROM_LIBRARY_LIST
libs-list += $$($(1)_PATH)
+ endif
clean-files += $$(_d)/*.a $$(_d)/*.$(SO_EXT) $$(_d)/*.o $$(_d)/.*.dep $$($(1)_DEPS) $$($(1)_OBJS)
dist-files += $$(_srcs)
endef
diff --git a/mk/tests.mk b/mk/tests.mk
index e353d46a0..70c30661b 100644
--- a/mk/tests.mk
+++ b/mk/tests.mk
@@ -39,7 +39,7 @@ installcheck:
echo "$${red}$$failed out of $$total tests failed $$normal"; \
exit 1; \
else \
- echo "$${green}All tests succeeded"; \
+ echo "$${green}All tests succeeded$$normal"; \
fi
.PHONY: check installcheck
diff --git a/release-common.nix b/release-common.nix
index 4553118e1..a4ae24ba4 100644
--- a/release-common.nix
+++ b/release-common.nix
@@ -1,7 +1,9 @@
{ pkgs }:
rec {
- sh = pkgs.busybox.override {
+ # Use "busybox-sandbox-shell" if present,
+ # if not (legacy) fallback and hope it's sufficient.
+ sh = pkgs.busybox-sandbox-shell or (pkgs.busybox.override {
useMusl = true;
enableStatic = true;
enableMinimal = true;
@@ -11,7 +13,7 @@ rec {
CONFIG_ASH_TEST y
CONFIG_ASH_OPTIMIZE_FOR_SIZE y
'';
- };
+ });
configureFlags =
[ "--disable-init-state"
diff --git a/release.nix b/release.nix
index 68b586232..9e04f0b67 100644
--- a/release.nix
+++ b/release.nix
@@ -1,4 +1,4 @@
-{ nix ? fetchGit ./.
+{ nix ? builtins.fetchGit ./.
, nixpkgs ? fetchTarball channel:nixos-17.09
, officialRelease ? false
, systems ? [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ]
@@ -6,7 +6,7 @@
let
- pkgs = import nixpkgs {};
+ pkgs = import nixpkgs { system = builtins.currentSystem or "x86_64-linux"; };
jobs = rec {
@@ -224,11 +224,13 @@ let
nix = build.x86_64-linux; system = "x86_64-linux";
});
- tests.setuid = pkgs.lib.genAttrs (pkgs.lib.filter (pkgs.lib.hasSuffix "-linux") systems) (system:
- import ./tests/setuid.nix rec {
- inherit nixpkgs;
- nix = build.${system}; inherit system;
- });
+ tests.setuid = pkgs.lib.genAttrs
+ ["i686-linux" "x86_64-linux"]
+ (system:
+ import ./tests/setuid.nix rec {
+ inherit nixpkgs;
+ nix = build.${system}; inherit system;
+ });
tests.binaryTarball =
with import nixpkgs { system = "x86_64-linux"; };
@@ -313,7 +315,8 @@ let
{ extraPackages =
[ "sqlite" "sqlite-devel" "bzip2-devel" "libcurl-devel" "openssl-devel" "xz-devel" "libseccomp-devel" ]
++ extraPackages; };
- memSize = 1024;
+ # At most 2047MB can be simulated in qemu-system-i386
+ memSize = 2047;
meta.schedulingPriority = 50;
postRPMInstall = "cd /tmp/rpmout/BUILD/nix-* && make installcheck";
#enableParallelBuilding = true;
diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh
index 515b4a87e..716b6e9bc 100644
--- a/scripts/install-darwin-multi-user.sh
+++ b/scripts/install-darwin-multi-user.sh
@@ -33,7 +33,7 @@ readonly NIX_FIRST_BUILD_UID="30001"
readonly NIX_ROOT="/nix"
readonly PLIST_DEST=/Library/LaunchDaemons/org.nixos.nix-daemon.plist
-readonly PROFILE_TARGETS=("/etc/profile" "/etc/bashrc" "/etc/zshrc")
+readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/zshrc")
readonly PROFILE_BACKUP_SUFFIX=".backup-before-nix"
readonly PROFILE_NIX_FILE="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.sh"
@@ -647,7 +647,7 @@ chat_about_sudo() {
cat <<EOF
This script is going to call sudo a lot. Normally, it would show you
exactly what commands it is running and why. However, the script is
-run in a headless fashion, like this:
+run in a headless fashion, like this:
$ curl https://nixos.org/nix/install | sh
@@ -695,7 +695,7 @@ install_from_extracted_nix() {
cd "$EXTRACTED_NIX_PATH"
_sudo "to copy the basic Nix files to the new store at $NIX_ROOT/store" \
- rsync -rlpt "$(pwd)/store/" "$NIX_ROOT/store/"
+ rsync -rlpt ./store/* "$NIX_ROOT/store/"
if [ -d "$NIX_INSTALLED_NIX" ]; then
echo " Alright! We have our first nix at $NIX_INSTALLED_NIX"
diff --git a/scripts/nix-profile.sh.in b/scripts/nix-profile.sh.in
index 450d683c7..a5f52274f 100644
--- a/scripts/nix-profile.sh.in
+++ b/scripts/nix-profile.sh.in
@@ -75,7 +75,7 @@ if [ -n "$HOME" ] && [ -n "$USER" ]; then
export NIX_SSL_CERT_FILE="$NIX_LINK/etc/ca-bundle.crt"
fi
- if [ -n ${MANPATH} ]; then
+ if [ -n "${MANPATH}" ]; then
export MANPATH="$NIX_LINK/share/man:$MANPATH"
fi
diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc
index 445006b32..dbf8fe1b8 100644
--- a/src/build-remote/build-remote.cc
+++ b/src/build-remote/build-remote.cc
@@ -64,6 +64,8 @@ int main (int argc, char * * argv)
settings.maxBuildJobs.set("1"); // hack to make tests with local?root= work
+ initPlugins();
+
auto store = openStore().cast<LocalStore>();
/* It would be more appropriate to use $XDG_RUNTIME_DIR, since
@@ -218,9 +220,11 @@ connected:
signal(SIGALRM, old);
}
+ auto substitute = settings.buildersUseSubstitutes ? Substitute : NoSubstitute;
+
{
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying dependencies to '%s'", storeUri));
- copyPaths(store, ref<Store>(sshStore), inputs, NoRepair, NoCheckSigs);
+ copyPaths(store, ref<Store>(sshStore), inputs, NoRepair, NoCheckSigs, substitute);
}
uploadLock = -1;
@@ -239,8 +243,8 @@ connected:
if (!missing.empty()) {
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying outputs from '%s'", storeUri));
- setenv("NIX_HELD_LOCKS", concatStringsSep(" ", missing).c_str(), 1); /* FIXME: ugly */
- copyPaths(ref<Store>(sshStore), store, missing, NoRepair, NoCheckSigs);
+ store->locksHeld.insert(missing.begin(), missing.end()); /* FIXME: ugly */
+ copyPaths(ref<Store>(sshStore), store, missing, NoRepair, NoCheckSigs, substitute);
}
return;
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 63de2d60a..b94bc597b 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -300,16 +300,25 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
{
countCalls = getEnv("NIX_COUNT_CALLS", "0") != "0";
- restricted = settings.restrictEval;
-
assert(gcInitialised);
/* Initialise the Nix expression search path. */
- Strings paths = parseNixPath(getEnv("NIX_PATH", ""));
- for (auto & i : _searchPath) addToSearchPath(i);
- for (auto & i : paths) addToSearchPath(i);
+ if (!settings.pureEval) {
+ Strings paths = parseNixPath(getEnv("NIX_PATH", ""));
+ for (auto & i : _searchPath) addToSearchPath(i);
+ for (auto & i : paths) addToSearchPath(i);
+ }
addToSearchPath("nix=" + settings.nixDataDir + "/nix/corepkgs");
+ if (settings.restrictEval || settings.pureEval) {
+ allowedPaths = PathSet();
+ for (auto & i : searchPath) {
+ auto r = resolveSearchPathElem(i);
+ if (!r.first) continue;
+ allowedPaths->insert(r.second);
+ }
+ }
+
clearValue(vEmptySet);
vEmptySet.type = tAttrs;
vEmptySet.attrs = allocBindings(0);
@@ -326,38 +335,36 @@ EvalState::~EvalState()
Path EvalState::checkSourcePath(const Path & path_)
{
- if (!restricted) return path_;
+ if (!allowedPaths) return path_;
+
+ bool found = false;
+
+ for (auto & i : *allowedPaths) {
+ if (isDirOrInDir(path_, i)) {
+ found = true;
+ break;
+ }
+ }
+
+ if (!found)
+ throw RestrictedPathError("access to path '%1%' is forbidden in restricted mode", path_);
/* Resolve symlinks. */
debug(format("checking access to '%s'") % path_);
Path path = canonPath(path_, true);
- for (auto & i : searchPath) {
- auto r = resolveSearchPathElem(i);
- if (!r.first) continue;
- if (path == r.second || isInDir(path, r.second))
+ for (auto & i : *allowedPaths) {
+ if (isDirOrInDir(path, i))
return path;
}
- /* To support import-from-derivation, allow access to anything in
- the store. FIXME: only allow access to paths that have been
- constructed by this evaluation. */
- if (store->isInStore(path)) return path;
-
-#if 0
- /* Hack to support the chroot dependencies of corepkgs (see
- corepkgs/config.nix.in). */
- if (path == settings.nixPrefix && isStorePath(settings.nixPrefix))
- return path;
-#endif
-
- throw RestrictedPathError(format("access to path '%1%' is forbidden in restricted mode") % path_);
+ throw RestrictedPathError("access to path '%1%' is forbidden in restricted mode", path);
}
void EvalState::checkURI(const std::string & uri)
{
- if (!restricted) return;
+ if (!settings.restrictEval) return;
/* 'uri' should be equal to a prefix, or in a subdirectory of a
prefix. Thus, the prefix https://github.co does not permit
@@ -371,11 +378,33 @@ void EvalState::checkURI(const std::string & uri)
&& (prefix[prefix.size() - 1] == '/' || uri[prefix.size()] == '/')))
return;
+ /* If the URI is a path, then check it against allowedPaths as
+ well. */
+ if (hasPrefix(uri, "/")) {
+ checkSourcePath(uri);
+ return;
+ }
+
+ if (hasPrefix(uri, "file://")) {
+ checkSourcePath(std::string(uri, 7));
+ return;
+ }
+
throw RestrictedPathError("access to URI '%s' is forbidden in restricted mode", uri);
}
-void EvalState::addConstant(const string & name, Value & v)
+Path EvalState::toRealPath(const Path & path, const PathSet & context)
+{
+ // FIXME: check whether 'path' is in 'context'.
+ return
+ !context.empty() && store->isInStore(path)
+ ? store->toRealPath(path)
+ : path;
+};
+
+
+Value * EvalState::addConstant(const string & name, Value & v)
{
Value * v2 = allocValue();
*v2 = v;
@@ -383,12 +412,18 @@ void EvalState::addConstant(const string & name, Value & v)
baseEnv.values[baseEnvDispl++] = v2;
string name2 = string(name, 0, 2) == "__" ? string(name, 2) : name;
baseEnv.values[0]->attrs->push_back(Attr(symbols.create(name2), v2));
+ return v2;
}
-void EvalState::addPrimOp(const string & name,
+Value * EvalState::addPrimOp(const string & name,
unsigned int arity, PrimOpFun primOp)
{
+ if (arity == 0) {
+ Value v;
+ primOp(*this, noPos, nullptr, v);
+ return addConstant(name, v);
+ }
Value * v = allocValue();
string name2 = string(name, 0, 2) == "__" ? string(name, 2) : name;
Symbol sym = symbols.create(name2);
@@ -397,6 +432,7 @@ void EvalState::addPrimOp(const string & name,
staticBaseEnv.vars[symbols.create(name)] = baseEnvDispl;
baseEnv.values[baseEnvDispl++] = v;
baseEnv.values[0]->attrs->push_back(Attr(sym, v));
+ return v;
}
@@ -649,8 +685,10 @@ Value * ExprPath::maybeThunk(EvalState & state, Env & env)
}
-void EvalState::evalFile(const Path & path, Value & v)
+void EvalState::evalFile(const Path & path_, Value & v)
{
+ auto path = checkSourcePath(path_);
+
FileEvalCache::iterator i;
if ((i = fileEvalCache.find(path)) != fileEvalCache.end()) {
v = i->second;
@@ -1546,7 +1584,7 @@ string EvalState::copyPathToStore(PathSet & context, const Path & path)
dstPath = srcToStore[path];
else {
dstPath = settings.readOnlyMode
- ? store->computeStorePathForPath(checkSourcePath(path)).first
+ ? store->computeStorePathForPath(baseNameOf(path), checkSourcePath(path)).first
: store->addToStore(baseNameOf(path), checkSourcePath(path), true, htSHA256, defaultPathFilter, repair);
srcToStore[path] = dstPath;
printMsg(lvlChatty, format("copied source '%1%' -> '%2%'")
@@ -1668,10 +1706,13 @@ void EvalState::printStats()
printMsg(v, format(" time elapsed: %1%") % cpuTime);
printMsg(v, format(" size of a value: %1%") % sizeof(Value));
printMsg(v, format(" size of an attr: %1%") % sizeof(Attr));
- printMsg(v, format(" environments allocated: %1% (%2% bytes)") % nrEnvs % bEnvs);
- printMsg(v, format(" list elements: %1% (%2% bytes)") % nrListElems % bLists);
+ printMsg(v, format(" environments allocated count: %1%") % nrEnvs);
+ printMsg(v, format(" environments allocated bytes: %1%") % bEnvs);
+ printMsg(v, format(" list elements count: %1%") % nrListElems);
+ printMsg(v, format(" list elements bytes: %1%") % bLists);
printMsg(v, format(" list concatenations: %1%") % nrListConcats);
- printMsg(v, format(" values allocated: %1% (%2% bytes)") % nrValues % bValues);
+ printMsg(v, format(" values allocated count: %1%") % nrValues);
+ printMsg(v, format(" values allocated bytes: %1%") % bValues);
printMsg(v, format(" sets allocated: %1% (%2% bytes)") % nrAttrsets % bAttrsets);
printMsg(v, format(" right-biased unions: %1%") % nrOpUpdates);
printMsg(v, format(" values copied in right-biased unions: %1%") % nrOpUpdateValuesCopied);
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index f0ab1435b..51905d7e1 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -76,9 +76,9 @@ public:
already exist there. */
RepairFlag repair;
- /* If set, don't allow access to files outside of the Nix search
- path or to environment variables. */
- bool restricted;
+ /* The allowed filesystem paths in restricted or pure evaluation
+ mode. */
+ std::experimental::optional<PathSet> allowedPaths;
Value vEmptySet;
@@ -112,6 +112,15 @@ public:
void checkURI(const std::string & uri);
+ /* When using a diverted store and 'path' is in the Nix store, map
+ 'path' to the diverted location (e.g. /nix/store/foo is mapped
+ to /home/alice/my-nix/nix/store/foo). However, this is only
+ done if the context is not empty, since otherwise we're
+ probably trying to read from the actual /nix/store. This is
+ intended to distinguish between import-from-derivation and
+ sources stored in the actual /nix/store. */
+ Path toRealPath(const Path & path, const PathSet & context);
+
/* Parse a Nix expression from the specified file. */
Expr * parseExprFromFile(const Path & path);
Expr * parseExprFromFile(const Path & path, StaticEnv & staticEnv);
@@ -201,9 +210,9 @@ private:
void createBaseEnv();
- void addConstant(const string & name, Value & v);
+ Value * addConstant(const string & name, Value & v);
- void addPrimOp(const string & name,
+ Value * addPrimOp(const string & name,
unsigned int arity, PrimOpFun primOp);
public:
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 602971427..466fd13e8 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -39,7 +39,7 @@ std::pair<string, string> decodeContext(const string & s)
size_t index = s.find("!", 1);
return std::pair<string, string>(string(s, index + 1), string(s, 1, index - 1));
} else
- return std::pair<string, string>(s.at(0) == '/' ? s: string(s, 1), "");
+ return std::pair<string, string>(s.at(0) == '/' ? s : string(s, 1), "");
}
@@ -49,24 +49,38 @@ InvalidPathError::InvalidPathError(const Path & path) :
void EvalState::realiseContext(const PathSet & context)
{
PathSet drvs;
+
for (auto & i : context) {
std::pair<string, string> decoded = decodeContext(i);
Path ctx = decoded.first;
assert(store->isStorePath(ctx));
if (!store->isValidPath(ctx))
throw InvalidPathError(ctx);
- if (!decoded.second.empty() && nix::isDerivation(ctx))
+ if (!decoded.second.empty() && nix::isDerivation(ctx)) {
drvs.insert(decoded.first + "!" + decoded.second);
+
+ /* Add the output of this derivation to the allowed
+ paths. */
+ if (allowedPaths) {
+ auto drv = store->derivationFromPath(decoded.first);
+ DerivationOutputs::iterator i = drv.outputs.find(decoded.second);
+ if (i == drv.outputs.end())
+ throw Error("derivation '%s' does not have an output named '%s'", decoded.first, decoded.second);
+ allowedPaths->insert(i->second.path);
+ }
+ }
}
- if (!drvs.empty()) {
- if (!settings.enableImportFromDerivation)
- throw EvalError(format("attempted to realize '%1%' during evaluation but 'allow-import-from-derivation' is false") % *(drvs.begin()));
- /* For performance, prefetch all substitute info. */
- PathSet willBuild, willSubstitute, unknown;
- unsigned long long downloadSize, narSize;
- store->queryMissing(drvs, willBuild, willSubstitute, unknown, downloadSize, narSize);
- store->buildPaths(drvs);
- }
+
+ if (drvs.empty()) return;
+
+ if (!settings.enableImportFromDerivation)
+ throw EvalError(format("attempted to realize '%1%' during evaluation but 'allow-import-from-derivation' is false") % *(drvs.begin()));
+
+ /* For performance, prefetch all substitute info. */
+ PathSet willBuild, willSubstitute, unknown;
+ unsigned long long downloadSize, narSize;
+ store->queryMissing(drvs, willBuild, willSubstitute, unknown, downloadSize, narSize);
+ store->buildPaths(drvs);
}
@@ -84,10 +98,10 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
% path % e.path % pos);
}
- path = state.checkSourcePath(path);
+ Path realPath = state.checkSourcePath(state.toRealPath(path, context));
if (state.store->isStorePath(path) && state.store->isValidPath(path) && isDerivation(path)) {
- Derivation drv = readDerivation(path);
+ Derivation drv = readDerivation(realPath);
Value & w = *state.allocValue();
state.mkAttrs(w, 3 + drv.outputs.size());
Value * v2 = state.allocAttr(w, state.sDrvPath);
@@ -114,7 +128,7 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
} else {
state.forceAttrs(*args[0]);
if (args[0]->attrs->empty())
- state.evalFile(path, v);
+ state.evalFile(realPath, v);
else {
Env * env = &state.allocEnv(args[0]->attrs->size());
env->up = &state.baseEnv;
@@ -127,8 +141,8 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
env->values[displ++] = attr.value;
}
- printTalkative("evaluating file '%1%'", path);
- Expr * e = state.parseExprFromFile(resolveExprPath(path), staticEnv);
+ printTalkative("evaluating file '%1%'", realPath);
+ Expr * e = state.parseExprFromFile(resolveExprPath(realPath), staticEnv);
e->eval(state, *env, v);
}
@@ -439,7 +453,7 @@ static void prim_tryEval(EvalState & state, const Pos & pos, Value * * args, Val
static void prim_getEnv(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
string name = state.forceStringNoCtx(*args[0], pos);
- mkString(v, state.restricted ? "" : getEnv(name));
+ mkString(v, settings.restrictEval || settings.pureEval ? "" : getEnv(name));
}
@@ -863,7 +877,7 @@ static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Va
throw EvalError(format("cannot read '%1%', since path '%2%' is not valid, at %3%")
% path % e.path % pos);
}
- string s = readFile(state.checkSourcePath(path));
+ string s = readFile(state.checkSourcePath(state.toRealPath(path, context)));
if (s.find((char) 0) != string::npos)
throw Error(format("the contents of the file '%1%' cannot be represented as a Nix string") % path);
mkString(v, s.c_str());
@@ -1009,20 +1023,13 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
}
-static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args, Value & v)
+static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_,
+ Value * filterFun, bool recursive, const Hash & expectedHash, Value & v)
{
- PathSet context;
- Path path = state.coerceToPath(pos, *args[1], context);
- if (!context.empty())
- throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % pos);
-
- state.forceValue(*args[0]);
- if (args[0]->type != tLambda)
- throw TypeError(format("first argument in call to 'filterSource' is not a function but %1%, at %2%") % showType(*args[0]) % pos);
-
- path = state.checkSourcePath(path);
-
- PathFilter filter = [&](const Path & path) {
+ const auto path = settings.pureEval && expectedHash ?
+ path_ :
+ state.checkSourcePath(path_);
+ PathFilter filter = filterFun ? ([&](const Path & path) {
auto st = lstat(path);
/* Call the filter function. The first argument is the path,
@@ -1031,7 +1038,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
mkString(arg1, path);
Value fun2;
- state.callFunction(*args[0], arg1, fun2, noPos);
+ state.callFunction(*filterFun, arg1, fun2, noPos);
Value arg2;
mkString(arg2,
@@ -1044,16 +1051,79 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
state.callFunction(fun2, arg2, res, noPos);
return state.forceBool(res, pos);
- };
+ }) : defaultPathFilter;
- Path dstPath = settings.readOnlyMode
- ? state.store->computeStorePathForPath(path, true, htSHA256, filter).first
- : state.store->addToStore(baseNameOf(path), path, true, htSHA256, filter, state.repair);
+ Path expectedStorePath;
+ if (expectedHash) {
+ expectedStorePath =
+ state.store->makeFixedOutputPath(recursive, expectedHash, name);
+ }
+ Path dstPath;
+ if (!expectedHash || !state.store->isValidPath(expectedStorePath)) {
+ dstPath = settings.readOnlyMode
+ ? state.store->computeStorePathForPath(name, path, recursive, htSHA256, filter).first
+ : state.store->addToStore(name, path, recursive, htSHA256, filter, state.repair);
+ if (expectedHash && expectedStorePath != dstPath) {
+ throw Error(format("store path mismatch in (possibly filtered) path added from '%1%'") % path);
+ }
+ } else
+ dstPath = expectedStorePath;
mkString(v, dstPath, {dstPath});
}
+static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args, Value & v)
+{
+ PathSet context;
+ Path path = state.coerceToPath(pos, *args[1], context);
+ if (!context.empty())
+ throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % pos);
+
+ state.forceValue(*args[0]);
+ if (args[0]->type != tLambda)
+ throw TypeError(format("first argument in call to 'filterSource' is not a function but %1%, at %2%") % showType(*args[0]) % pos);
+
+ addPath(state, pos, baseNameOf(path), path, args[0], true, Hash(), v);
+}
+
+static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value & v)
+{
+ state.forceAttrs(*args[0], pos);
+ Path path;
+ string name;
+ Value * filterFun = nullptr;
+ auto recursive = true;
+ Hash expectedHash;
+
+ for (auto & attr : *args[0]->attrs) {
+ const string & n(attr.name);
+ if (n == "path") {
+ PathSet context;
+ path = state.coerceToPath(*attr.pos, *attr.value, context);
+ if (!context.empty())
+ throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % *attr.pos);
+ } else if (attr.name == state.sName)
+ name = state.forceStringNoCtx(*attr.value, *attr.pos);
+ else if (n == "filter") {
+ state.forceValue(*attr.value);
+ filterFun = attr.value;
+ } else if (n == "recursive")
+ recursive = state.forceBool(*attr.value, *attr.pos);
+ else if (n == "sha256")
+ expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
+ else
+ throw EvalError(format("unsupported argument '%1%' to 'addPath', at %2%") % attr.name % *attr.pos);
+ }
+ if (path.empty())
+ throw EvalError(format("'path' required, at %1%") % pos);
+ if (name.empty())
+ name = baseNameOf(path);
+
+ addPath(state, pos, name, path, filterFun, recursive, expectedHash, v);
+}
+
+
/*************************************************************
* Sets
*************************************************************/
@@ -1929,7 +1999,14 @@ void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
state.checkURI(url);
+ if (settings.pureEval && !expectedHash)
+ throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who);
+
Path res = getDownloader()->downloadCached(state.store, url, unpack, name, expectedHash);
+
+ if (state.allowedPaths)
+ state.allowedPaths->insert(res);
+
mkString(v, res, PathSet({res}));
}
@@ -1981,11 +2058,24 @@ void EvalState::createBaseEnv()
mkNull(v);
addConstant("null", v);
- mkInt(v, time(0));
- addConstant("__currentTime", v);
+ auto vThrow = addPrimOp("throw", 1, prim_throw);
+
+ auto addPurityError = [&](const std::string & name) {
+ Value * v2 = allocValue();
+ mkString(*v2, fmt("'%s' is not allowed in pure evaluation mode", name));
+ mkApp(v, *vThrow, *v2);
+ addConstant(name, v);
+ };
- mkString(v, settings.thisSystem);
- addConstant("__currentSystem", v);
+ if (!settings.pureEval) {
+ mkInt(v, time(0));
+ addConstant("__currentTime", v);
+ }
+
+ if (!settings.pureEval) {
+ mkString(v, settings.thisSystem);
+ addConstant("__currentSystem", v);
+ }
mkString(v, nixVersion);
addConstant("__nixVersion", v);
@@ -2001,10 +2091,10 @@ void EvalState::createBaseEnv()
addConstant("__langVersion", v);
// Miscellaneous
- addPrimOp("scopedImport", 2, prim_scopedImport);
+ auto vScopedImport = addPrimOp("scopedImport", 2, prim_scopedImport);
Value * v2 = allocValue();
mkAttrs(*v2, 0);
- mkApp(v, *baseEnv.values[baseEnvDispl - 1], *v2);
+ mkApp(v, *vScopedImport, *v2);
forceValue(v);
addConstant("import", v);
if (settings.enableNativeCode) {
@@ -2020,7 +2110,6 @@ void EvalState::createBaseEnv()
addPrimOp("__isBool", 1, prim_isBool);
addPrimOp("__genericClosure", 1, prim_genericClosure);
addPrimOp("abort", 1, prim_abort);
- addPrimOp("throw", 1, prim_throw);
addPrimOp("__addErrorContext", 2, prim_addErrorContext);
addPrimOp("__tryEval", 1, prim_tryEval);
addPrimOp("__getEnv", 1, prim_getEnv);
@@ -2035,7 +2124,10 @@ void EvalState::createBaseEnv()
// Paths
addPrimOp("__toPath", 1, prim_toPath);
- addPrimOp("__storePath", 1, prim_storePath);
+ if (settings.pureEval)
+ addPurityError("__storePath");
+ else
+ addPrimOp("__storePath", 1, prim_storePath);
addPrimOp("__pathExists", 1, prim_pathExists);
addPrimOp("baseNameOf", 1, prim_baseNameOf);
addPrimOp("dirOf", 1, prim_dirOf);
@@ -2049,6 +2141,7 @@ void EvalState::createBaseEnv()
addPrimOp("__fromJSON", 1, prim_fromJSON);
addPrimOp("__toFile", 2, prim_toFile);
addPrimOp("__filterSource", 2, prim_filterSource);
+ addPrimOp("__path", 1, prim_path);
// Sets
addPrimOp("__attrNames", 1, prim_attrNames);
diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh
index 39d23b04a..31bf3f84f 100644
--- a/src/libexpr/primops.hh
+++ b/src/libexpr/primops.hh
@@ -9,6 +9,9 @@ struct RegisterPrimOp
{
typedef std::vector<std::tuple<std::string, size_t, PrimOpFun>> PrimOps;
static PrimOps * primOps;
+ /* You can register a constant by passing an arity of 0. fun
+ will get called during EvalState initialization, so there
+ may be primops not yet added and builtins is not yet sorted. */
RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun);
};
diff --git a/src/libexpr/primops/fetchGit.cc b/src/libexpr/primops/fetchGit.cc
index 0d0b11958..2e3e2634d 100644
--- a/src/libexpr/primops/fetchGit.cc
+++ b/src/libexpr/primops/fetchGit.cc
@@ -22,10 +22,15 @@ struct GitInfo
uint64_t revCount = 0;
};
+std::regex revRegex("^[0-9a-fA-F]{40}$");
+
GitInfo exportGit(ref<Store> store, const std::string & uri,
std::experimental::optional<std::string> ref, std::string rev,
const std::string & name)
{
+ if (settings.pureEval && rev == "")
+ throw Error("in pure evaluation mode, 'fetchGit' requires a Git revision");
+
if (!ref && rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.git")) {
bool clean = true;
@@ -76,16 +81,12 @@ GitInfo exportGit(ref<Store> store, const std::string & uri,
if (!ref) ref = "master"s;
- if (rev != "") {
- std::regex revRegex("^[0-9a-fA-F]{40}$");
- if (!std::regex_match(rev, revRegex))
- throw Error("invalid Git revision '%s'", rev);
- }
+ if (rev != "" && !std::regex_match(rev, revRegex))
+ throw Error("invalid Git revision '%s'", rev);
Path cacheDir = getCacheDir() + "/nix/git";
if (!pathExists(cacheDir)) {
- createDirs(cacheDir);
runProgram("git", true, { "init", "--bare", cacheDir });
}
@@ -232,6 +233,9 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), gitInfo.shortRev);
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), gitInfo.revCount);
v.attrs->sort();
+
+ if (state.allowedPaths)
+ state.allowedPaths->insert(gitInfo.storePath);
}
static RegisterPrimOp r("fetchGit", 1, prim_fetchGit);
diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc
index a317476c5..5517d83df 100644
--- a/src/libexpr/primops/fetchMercurial.cc
+++ b/src/libexpr/primops/fetchMercurial.cc
@@ -27,6 +27,9 @@ std::regex commitHashRegex("^[0-9a-fA-F]{40}$");
HgInfo exportMercurial(ref<Store> store, const std::string & uri,
std::string rev, const std::string & name)
{
+ if (settings.pureEval && rev == "")
+ throw Error("in pure evaluation mode, 'fetchMercurial' requires a Mercurial revision");
+
if (rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.hg")) {
bool clean = runProgram("hg", true, { "status", "-R", uri, "--modified", "--added", "--removed" }) == "";
@@ -196,6 +199,9 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(hgInfo.rev, 0, 12));
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), hgInfo.revCount);
v.attrs->sort();
+
+ if (state.allowedPaths)
+ state.allowedPaths->insert(hgInfo.storePath);
}
static RegisterPrimOp r("fetchMercurial", 1, prim_fetchMercurial);
diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc
index d3aac6aba..bcc05c2cd 100644
--- a/src/libmain/common-args.cc
+++ b/src/libmain/common-args.cc
@@ -37,6 +37,10 @@ MixCommonArgs::MixCommonArgs(const string & programName)
std::string cat = "config";
settings.convertToArgs(*this, cat);
+
+ // Backward compatibility hack: nix-env already had a --system flag.
+ if (programName == "nix-env") longFlags.erase("system");
+
hiddenCategories.insert(cat);
}
diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc
index 85d3c077b..90a486716 100644
--- a/src/libmain/shared.cc
+++ b/src/libmain/shared.cc
@@ -193,9 +193,6 @@ LegacyArgs::LegacyArgs(const std::string & programName,
mkFlag(0, "readonly-mode", "do not write to the Nix store",
&settings.readOnlyMode);
- mkFlag(0, "show-trace", "show Nix expression stack trace in evaluation errors",
- &settings.showTrace);
-
mkFlag(0, "no-gc-warning", "disable warning about not using '--add-root'",
&gcWarning, false);
diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh
index 1dcc4f0ac..8e4861232 100644
--- a/src/libmain/shared.hh
+++ b/src/libmain/shared.hh
@@ -22,6 +22,7 @@ public:
int handleExceptions(const string & programName, std::function<void()> fun);
+/* Don't forget to call initPlugins() after settings are initialized! */
void initNix();
void parseCmdLine(int argc, char * * argv,
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index ab971dd8b..d1b278b8e 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -149,7 +149,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const ref<std::str
/* Compress the NAR. */
narInfo->compression = compression;
auto now1 = std::chrono::steady_clock::now();
- auto narCompressed = compress(compression, *nar);
+ auto narCompressed = compress(compression, *nar, parallelCompression);
auto now2 = std::chrono::steady_clock::now();
narInfo->fileHash = hashString(htSHA256, *narCompressed);
narInfo->fileSize = narCompressed->size();
diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh
index 8492ff600..e20b96844 100644
--- a/src/libstore/binary-cache-store.hh
+++ b/src/libstore/binary-cache-store.hh
@@ -19,6 +19,8 @@ public:
const Setting<bool> writeNARListing{this, false, "write-nar-listing", "whether to write a JSON file listing the files in each NAR"};
const Setting<Path> secretKeyFile{this, "", "secret-key", "path to secret key used to sign the binary cache"};
const Setting<Path> localNarCache{this, "", "local-nar-cache", "path to a local cache of NARs"};
+ const Setting<bool> parallelCompression{this, false, "parallel-compression",
+ "enable multi-threading compression, available for xz only currently"};
private:
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index d4bd650ba..cc69ff1c7 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -6,6 +6,7 @@
#include "archive.hh"
#include "affinity.hh"
#include "builtins.hh"
+#include "download.hh"
#include "finally.hh"
#include "compression.hh"
#include "json.hh"
@@ -1123,11 +1124,6 @@ void DerivationGoal::haveDerivation()
return;
}
- /* Reject doing a hash build of anything other than a fixed-output
- derivation. */
- if (buildMode == bmHash && !drv->isFixedOutput())
- throw Error("cannot do a hash build of non-fixed-output derivation '%1%'", drvPath);
-
/* We are first going to try to create the invalid output paths
through substitutes. If that doesn't work, we'll build
them. */
@@ -1319,9 +1315,7 @@ void DerivationGoal::inputsRealised()
allPaths.insert(inputPaths.begin(), inputPaths.end());
/* Is this a fixed-output derivation? */
- fixedOutput = true;
- for (auto & i : drv->outputs)
- if (i.second.hash == "") fixedOutput = false;
+ fixedOutput = drv->isFixedOutput();
/* Don't repeat fixed-output derivations since they're already
verified by their output hash.*/
@@ -1341,19 +1335,6 @@ void DerivationGoal::tryToBuild()
{
trace("trying to build");
- /* Check for the possibility that some other goal in this process
- has locked the output since we checked in haveDerivation().
- (It can't happen between here and the lockPaths() call below
- because we're not allowing multi-threading.) If so, put this
- goal to sleep until another goal finishes, then try again. */
- for (auto & i : drv->outputs)
- if (pathIsLockedByMe(worker.store.toRealPath(i.second.path))) {
- debug(format("putting derivation '%1%' to sleep because '%2%' is locked by another goal")
- % drvPath % i.second.path);
- worker.waitForAnyGoal(shared_from_this());
- return;
- }
-
/* Obtain locks on all output paths. The locks are automatically
released when we exit this function or Nix crashes. If we
can't acquire the lock, then continue; hopefully some other
@@ -1777,6 +1758,19 @@ PathSet exportReferences(Store & store, PathSet storePaths)
return paths;
}
+static std::once_flag dns_resolve_flag;
+
+static void preloadNSS() {
+ /* builtin:fetchurl can trigger a DNS lookup, which with glibc can trigger a dynamic library load of
+ one of the glibc NSS libraries in a sandboxed child, which will fail unless the library's already
+ been loaded in the parent. So we attempt to download an invalid URL, which forces the NSS machinery to
+ load its lookup libraries in the parent before any child gets a chance to. */
+ std::call_once(dns_resolve_flag, []() {
+ DownloadRequest request("http://this.pre-initializes.the.dns.resolvers.invalid");
+ request.tries = 1; // We only need to do it once, and this also suppresses an annoying warning
+ try { getDownloader()->download(request); } catch (...) {}
+ });
+}
void DerivationGoal::startBuilder()
{
@@ -1787,6 +1781,9 @@ void DerivationGoal::startBuilder()
% drv->platform % settings.thisSystem % drvPath);
}
+ if (drv->isBuiltin())
+ preloadNSS();
+
#if __APPLE__
additionalSandboxProfile = get(drv->env, "__sandboxProfile");
#endif
@@ -1810,8 +1807,13 @@ void DerivationGoal::startBuilder()
useChroot = !fixedOutput && get(drv->env, "__noChroot") != "1";
}
- if (worker.store.storeDir != worker.store.realStoreDir)
- useChroot = true;
+ if (worker.store.storeDir != worker.store.realStoreDir) {
+ #if __linux__
+ useChroot = true;
+ #else
+ throw Error("building using a diverted store is not supported on this platform");
+ #endif
+ }
/* If `build-users-group' is not empty, then we have to build as
one of the members of that group. */
@@ -2928,8 +2930,13 @@ void DerivationGoal::runChild()
if (drv->isBuiltin()) {
try {
logger = makeJSONLogger(*logger);
+
+ BasicDerivation drv2(*drv);
+ for (auto & e : drv2.env)
+ e.second = rewriteStrings(e.second, inputRewrites);
+
if (drv->builder == "builtin:fetchurl")
- builtinFetchurl(*drv, netrcData);
+ builtinFetchurl(drv2, netrcData);
else
throw Error(format("unsupported builtin function '%1%'") % string(drv->builder, 8));
_exit(0);
@@ -2992,6 +2999,8 @@ void DerivationGoal::registerOutputs()
bool runDiffHook = settings.runDiffHook;
bool keepPreviousRound = settings.keepFailed || runDiffHook;
+ std::exception_ptr delayedException;
+
/* Check whether the output paths were created, and grep each
output path to determine what other paths it references. Also make all
output paths read-only. */
@@ -3066,7 +3075,7 @@ void DerivationGoal::registerOutputs()
/* Check that fixed-output derivations produced the right
outputs (i.e., the content hash should match the specified
hash). */
- if (i.second.hash != "") {
+ if (fixedOutput) {
bool recursive; Hash h;
i.second.parseHashInfo(recursive, h);
@@ -3082,27 +3091,34 @@ void DerivationGoal::registerOutputs()
/* Check the hash. In hash mode, move the path produced by
the derivation to its content-addressed location. */
Hash h2 = recursive ? hashPath(h.type, actualPath).first : hashFile(h.type, actualPath);
- if (buildMode == bmHash) {
- Path dest = worker.store.makeFixedOutputPath(recursive, h2, drv->env["name"]);
- printError(format("build produced path '%1%' with %2% hash '%3%'")
- % dest % printHashType(h.type) % printHash16or32(h2));
- if (worker.store.isValidPath(dest))
- return;
+
+ Path dest = worker.store.makeFixedOutputPath(recursive, h2, drv->env["name"]);
+
+ if (h != h2) {
+
+ /* Throw an error after registering the path as
+ valid. */
+ delayedException = std::make_exception_ptr(
+ BuildError("fixed-output derivation produced path '%s' with %s hash '%s' instead of the expected hash '%s'",
+ dest, printHashType(h.type), printHash16or32(h2), printHash16or32(h)));
+
Path actualDest = worker.store.toRealPath(dest);
+
+ if (worker.store.isValidPath(dest))
+ std::rethrow_exception(delayedException);
+
if (actualPath != actualDest) {
PathLocks outputLocks({actualDest});
deletePath(actualDest);
if (rename(actualPath.c_str(), actualDest.c_str()) == -1)
throw SysError(format("moving '%1%' to '%2%'") % actualPath % dest);
}
+
path = dest;
actualPath = actualDest;
- } else {
- if (h != h2)
- throw BuildError(
- format("output path '%1%' has %2% hash '%3%' when '%4%' was expected")
- % path % i.second.hashAlgo % printHash16or32(h2) % printHash16or32(h));
}
+ else
+ assert(path == dest);
info.ca = makeFixedOutputCA(recursive, h2);
}
@@ -3279,6 +3295,11 @@ void DerivationGoal::registerOutputs()
paths referenced by each of them. If there are cycles in the
outputs, this will fail. */
worker.store.registerValidPaths(infos);
+
+ /* In case of a fixed-output derivation hash mismatch, throw an
+ exception now that we have registered the output as valid. */
+ if (delayedException)
+ std::rethrow_exception(delayedException);
}
@@ -3394,7 +3415,7 @@ void DerivationGoal::flushLine()
else {
if (settings.verboseBuild &&
(settings.printRepeatedBuilds || curRound == 1))
- printError(filterANSIEscapes(currentLogLine, true));
+ printError(currentLogLine);
else {
logTail.push_back(currentLogLine);
if (logTail.size() > settings.logLines) logTail.pop_front();
@@ -3636,7 +3657,7 @@ void SubstitutionGoal::tryNext()
/* Update the total expected download size. */
auto narInfo = std::dynamic_pointer_cast<const NarInfo>(info);
- maintainExpectedNar = std::make_unique<MaintainCount<uint64_t>>(worker.expectedNarSize, narInfo->narSize);
+ maintainExpectedNar = std::make_unique<MaintainCount<uint64_t>>(worker.expectedNarSize, info->narSize);
maintainExpectedDownload =
narInfo && narInfo->fileSize
@@ -3650,9 +3671,12 @@ void SubstitutionGoal::tryNext()
/* Bail out early if this substituter lacks a valid
signature. LocalStore::addToStore() also checks for this, but
only after we've downloaded the path. */
- if (worker.store.requireSigs && !info->checkSignatures(worker.store, worker.store.publicKeys)) {
- printInfo(format("warning: substituter '%s' does not have a valid signature for path '%s'")
- % sub->getUri() % storePath);
+ if (worker.store.requireSigs
+ && !sub->isTrusted
+ && !info->checkSignatures(worker.store, worker.store.publicKeys))
+ {
+ printError("warning: substituter '%s' does not have a valid signature for path '%s'",
+ sub->getUri(), storePath);
tryNext();
return;
}
@@ -3702,6 +3726,17 @@ void SubstitutionGoal::tryToRun()
return;
}
+ /* If the store path is already locked (probably by a
+ DerivationGoal), then put this goal to sleep. Note: we don't
+ acquire a lock here since that breaks addToStore(), so below we
+ handle an AlreadyLocked exception from addToStore(). The check
+ here is just an optimisation to prevent having to redo a
+ download due to a locked path. */
+ if (pathIsLockedByMe(worker.store.toRealPath(storePath))) {
+ worker.waitForAWhile(shared_from_this());
+ return;
+ }
+
maintainRunningSubstitutions = std::make_unique<MaintainCount<uint64_t>>(worker.runningSubstitutions);
worker.updateProgress();
@@ -3718,7 +3753,7 @@ void SubstitutionGoal::tryToRun()
PushActivity pact(act.id);
copyStorePath(ref<Store>(sub), ref<Store>(worker.store.shared_from_this()),
- storePath, repair);
+ storePath, repair, sub->isTrusted ? NoCheckSigs : CheckSigs);
promise.set_value();
} catch (...) {
@@ -3741,8 +3776,14 @@ void SubstitutionGoal::finished()
try {
promise.get_future().get();
+ } catch (AlreadyLocked & e) {
+ /* Probably a DerivationGoal is already building this store
+ path. Sleep for a while and try again. */
+ state = &SubstitutionGoal::init;
+ worker.waitForAWhile(shared_from_this());
+ return;
} catch (Error & e) {
- printInfo(e.msg());
+ printError(e.msg());
/* Try the next substitute. */
state = &SubstitutionGoal::tryNext;
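
The fixed-output hash-mismatch handling above leans on std::exception_ptr: the error is captured when the mismatch is detected, the output is still moved to its content-addressed location and registered as valid, and only then is the error rethrown. A minimal standalone sketch of that deferred-error idiom (names and message are illustrative, not Nix's):

    #include <exception>
    #include <stdexcept>
    #include <iostream>

    int main()
    {
        std::exception_ptr delayedException;

        // Detect the problem, but don't throw yet.
        delayedException = std::make_exception_ptr(
            std::runtime_error("hash mismatch (illustrative)"));

        // Work that must happen regardless of the error,
        // e.g. registering the produced path as valid.
        std::cout << "output registered\n";

        // Now surface the original error to the caller.
        if (delayedException)
            std::rethrow_exception(delayedException);
    }
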
diff --git a/src/libstore/download.cc b/src/libstore/download.cc
index 4474dfd4b..258d7937c 100644
--- a/src/libstore/download.cc
+++ b/src/libstore/download.cc
@@ -17,11 +17,13 @@
#include <curl/curl.h>
-#include <queue>
-#include <iostream>
-#include <thread>
+#include <algorithm>
#include <cmath>
+#include <cstring>
+#include <iostream>
+#include <queue>
#include <random>
+#include <thread>
using namespace std::string_literals;
@@ -91,6 +93,8 @@ struct CurlDownloader : public Downloader
{
if (!request.expectedETag.empty())
requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + request.expectedETag).c_str());
+ if (!request.mimeType.empty())
+ requestHeaders = curl_slist_append(requestHeaders, ("Content-Type: " + request.mimeType).c_str());
}
~DownloadItem()
@@ -185,6 +189,22 @@ struct CurlDownloader : public Downloader
return 0;
}
+ size_t readOffset = 0;
+ int readCallback(char *buffer, size_t size, size_t nitems)
+ {
+ if (readOffset == request.data->length())
+ return 0;
+ auto count = std::min(size * nitems, request.data->length() - readOffset);
+ memcpy(buffer, request.data->data() + readOffset, count);
+ readOffset += count;
+ return count;
+ }
+
+ static int readCallbackWrapper(char *buffer, size_t size, size_t nitems, void * userp)
+ {
+ return ((DownloadItem *) userp)->readCallback(buffer, size, nitems);
+ }
+
long lowSpeedTimeout = 300;
void init()
@@ -225,6 +245,13 @@ struct CurlDownloader : public Downloader
if (request.head)
curl_easy_setopt(req, CURLOPT_NOBODY, 1);
+ if (request.data) {
+ curl_easy_setopt(req, CURLOPT_UPLOAD, 1L);
+ curl_easy_setopt(req, CURLOPT_READFUNCTION, readCallbackWrapper);
+ curl_easy_setopt(req, CURLOPT_READDATA, this);
+ curl_easy_setopt(req, CURLOPT_INFILESIZE_LARGE, (curl_off_t) request.data->length());
+ }
+
if (request.verifyTLS) {
if (settings.caFile != "")
curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.c_str());
@@ -265,7 +292,7 @@ struct CurlDownloader : public Downloader
}
if (code == CURLE_OK &&
- (httpStatus == 200 || httpStatus == 304 || httpStatus == 226 /* FTP */ || httpStatus == 0 /* other protocol */))
+ (httpStatus == 200 || httpStatus == 201 || httpStatus == 204 || httpStatus == 304 || httpStatus == 226 /* FTP */ || httpStatus == 0 /* other protocol */))
{
result.cached = httpStatus == 304;
done = true;
@@ -303,6 +330,7 @@ struct CurlDownloader : public Downloader
// Don't bother retrying on certain cURL errors either
switch (code) {
case CURLE_FAILED_INIT:
+ case CURLE_URL_MALFORMAT:
case CURLE_NOT_BUILT_IN:
case CURLE_REMOTE_ACCESS_DENIED:
case CURLE_FILE_COULDNT_READ_FILE:
@@ -311,10 +339,10 @@ struct CurlDownloader : public Downloader
case CURLE_BAD_FUNCTION_ARGUMENT:
case CURLE_INTERFACE_FAILED:
case CURLE_UNKNOWN_OPTION:
- err = Misc;
- break;
+ err = Misc;
+ break;
default: // Shut up warnings
- break;
+ break;
}
}
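
The upload support added here is plain libcurl PUT plumbing: CURLOPT_UPLOAD switches the request to upload mode and a read callback feeds the body from request.data. A stripped-down sketch of that setup, assuming the payload lives in a std::string (error handling omitted):

    #include <curl/curl.h>
    #include <algorithm>
    #include <cstring>
    #include <string>

    struct Upload { const std::string * data; size_t offset = 0; };

    /* libcurl pulls the request body through this callback;
       returning 0 signals end of data. */
    static size_t readCallback(char * buffer, size_t size, size_t nitems, void * userp)
    {
        auto up = static_cast<Upload *>(userp);
        size_t count = std::min(size * nitems, up->data->size() - up->offset);
        memcpy(buffer, up->data->data() + up->offset, count);
        up->offset += count;
        return count;
    }

    void configureUpload(CURL * req, Upload & up)
    {
        curl_easy_setopt(req, CURLOPT_UPLOAD, 1L);
        curl_easy_setopt(req, CURLOPT_READFUNCTION, readCallback);
        curl_easy_setopt(req, CURLOPT_READDATA, &up);
        curl_easy_setopt(req, CURLOPT_INFILESIZE_LARGE, (curl_off_t) up.data->size());
    }
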
diff --git a/src/libstore/download.hh b/src/libstore/download.hh
index f2d65ad8d..d9d525d4e 100644
--- a/src/libstore/download.hh
+++ b/src/libstore/download.hh
@@ -18,6 +18,8 @@ struct DownloadRequest
unsigned int baseRetryTimeMs = 250;
ActivityId parentAct;
bool decompress = true;
+ std::shared_ptr<std::string> data;
+ std::string mimeType;
DownloadRequest(const std::string & uri)
: uri(uri), parentAct(curActivity) { }
diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc
index ab2c5ca02..943b16c28 100644
--- a/src/libstore/gc.cc
+++ b/src/libstore/gc.cc
@@ -324,10 +324,8 @@ Roots LocalStore::findRootsNoTemp()
{
Roots roots;
- /* Process direct roots in {gcroots,manifests,profiles}. */
+ /* Process direct roots in {gcroots,profiles}. */
findRoots(stateDir + "/" + gcRootsDir, DT_UNKNOWN, roots);
- if (pathExists(stateDir + "/manifests"))
- findRoots(stateDir + "/manifests", DT_UNKNOWN, roots);
findRoots(stateDir + "/profiles", DT_UNKNOWN, roots);
/* Add additional roots returned by the program specified by the
diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index d3c96ddd6..21ab0e629 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -6,6 +6,7 @@
#include <algorithm>
#include <map>
#include <thread>
+#include <dlfcn.h>
namespace nix {
@@ -137,4 +138,18 @@ void MaxBuildJobsSetting::set(const std::string & str)
throw UsageError("configuration setting '%s' should be 'auto' or an integer", name);
}
+
+void initPlugins()
+{
+ for (const auto & pluginFile : settings.pluginFiles.get()) {
+ /* The handle is purposefully leaked, as the DSO may contain
+ state that the plugin needs for as long as the process runs. */
+ void *handle =
+ dlopen(pluginFile.c_str(), RTLD_LAZY | RTLD_LOCAL);
+ if (!handle)
+ throw Error(format("could not dynamically open plugin file '%1%': %2%") % pluginFile % dlerror());
+ }
+}
+
+
}
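
initPlugins() is dlopen() in a loop; keeping the handle open for the life of the process is what lets a plugin's static constructors (its registration side effects) stay valid. A minimal standalone sketch of the same pattern, using a generic exception type for illustration:

    #include <dlfcn.h>
    #include <stdexcept>
    #include <string>
    #include <vector>

    void loadPlugins(const std::vector<std::string> & pluginFiles)
    {
        for (auto & file : pluginFiles) {
            /* Intentionally never dlclose()d: the DSO may hold state
               that must outlive this function. */
            void * handle = dlopen(file.c_str(), RTLD_LAZY | RTLD_LOCAL);
            if (!handle)
                throw std::runtime_error(
                    "could not dynamically open plugin file '" + file + "': " + dlerror());
        }
    }
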
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 5c857cbb6..508084d08 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -138,6 +138,11 @@ public:
Setting<std::string> builders{this, "@" + nixConfDir + "/machines", "builders",
"A semicolon-separated list of build machines, in the format of nix.machines."};
+ Setting<bool> buildersUseSubstitutes{this, false, "builders-use-substitutes",
+ "Whether build machines should use their own substitutes for obtaining "
+ "build dependencies if possible, rather than waiting for this host to "
+ "upload them."};
+
Setting<off_t> reservedSize{this, 8 * 1024 * 1024, "gc-reserved-space",
"Amount of reserved disk space for the garbage collector."};
@@ -150,7 +155,7 @@ public:
Setting<bool> syncBeforeRegistering{this, false, "sync-before-registering",
"Whether to call sync() before registering a path as valid."};
- Setting<bool> useSubstitutes{this, true, "use-substitutes",
+ Setting<bool> useSubstitutes{this, true, "substitute",
"Whether to use substitutes.",
{"build-use-substitutes"}};
@@ -206,7 +211,8 @@ public:
bool lockCPU;
/* Whether to show a stack trace if Nix evaluation fails. */
- bool showTrace = false;
+ Setting<bool> showTrace{this, false, "show-trace",
+ "Whether to show a stack trace on evaluation errors."};
Setting<bool> enableNativeCode{this, false, "allow-unsafe-native-code-during-evaluation",
"Whether builtin functions that allow executing native code should be enabled."};
@@ -227,6 +233,9 @@ public:
"Whether to restrict file system access to paths in $NIX_PATH, "
"and network access to the URI prefixes listed in 'allowed-uris'."};
+ Setting<bool> pureEval{this, false, "pure-eval",
+ "Whether to restrict file system and network access to files specified by cryptographic hash."};
+
Setting<size_t> buildRepeat{this, 0, "repeat",
"The number of times to repeat a build in order to verify determinism.",
{"build-repeat"}};
@@ -278,10 +287,7 @@ public:
Setting<unsigned int> tarballTtl{this, 60 * 60, "tarball-ttl",
"How soon to expire files fetched by builtins.fetchTarball and builtins.fetchurl."};
- Setting<std::string> signedBinaryCaches{this, "*", "signed-binary-caches",
- "Obsolete."};
-
- Setting<bool> requireSigs{this, signedBinaryCaches == "*", "require-sigs",
+ Setting<bool> requireSigs{this, true, "require-sigs",
"Whether to check that any non-content-addressed path added to the "
"Nix store has a valid signature (that is, one signed using a key "
"listed in 'trusted-public-keys')."};
@@ -361,12 +367,19 @@ public:
Setting<Strings> allowedUris{this, {}, "allowed-uris",
"Prefixes of URIs that builtin functions such as fetchurl and fetchGit are allowed to fetch."};
+
+ Setting<Paths> pluginFiles{this, {}, "plugin-files",
+ "Plugins to dynamically load at nix initialization time."};
};
// FIXME: don't use a global variable.
extern Settings settings;
+/* This should be called after settings are initialized, but before
+ anything else */
+void initPlugins();
+
extern const string nixVersion;
diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index 057337685..b9e9cd5da 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -38,7 +38,7 @@ public:
try {
BinaryCacheStore::init();
} catch (UploadToHTTP &) {
- throw Error(format("'%s' does not appear to be a binary cache") % cacheUri);
+ throw Error("'%s' does not appear to be a binary cache", cacheUri);
}
diskCache->createCache(cacheUri, storeDir, wantMassQuery_, priority);
}
@@ -67,7 +67,14 @@ protected:
const std::string & data,
const std::string & mimeType) override
{
- throw UploadToHTTP("uploading to an HTTP binary cache is not supported");
+ auto req = DownloadRequest(cacheUri + "/" + path);
+ req.data = std::make_shared<string>(data); // FIXME: inefficient
+ req.mimeType = mimeType;
+ try {
+ getDownloader()->download(req);
+ } catch (DownloadError & e) {
+ throw UploadToHTTP(format("uploading to HTTP binary cache at '%1%' failed: %2%") % cacheUri % e.msg());
+ }
}
void getFile(const std::string & path,
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 7afecc1cf..4afe51ea9 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -992,8 +992,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, const ref<std::string> &
/* Lock the output path. But don't lock if we're being called
from a build hook (whose parent process already acquired a
lock on this path). */
- Strings locksHeld = tokenizeString<Strings>(getEnv("NIX_HELD_LOCKS"));
- if (find(locksHeld.begin(), locksHeld.end(), info.path) == locksHeld.end())
+ if (!locksHeld.count(info.path))
outputLock.lockPaths({realPath});
if (repair || !isValidPath(info.path)) {
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index d35cd1a94..bbd50e1c1 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -19,7 +19,7 @@ namespace nix {
/* Nix store and database schema version. Version 1 (or 0) was Nix <=
0.7. Version 2 was Nix 0.8 and 0.9. Version 3 is Nix 0.10.
Version 4 is Nix 0.11. Version 5 is Nix 0.12-0.16. Version 6 is
- Nix 1.0. Version 7 is Nix 1.3. Version 10 is 1.12. */
+ Nix 1.0. Version 7 is Nix 1.3. Version 10 is 2.0. */
const int nixSchemaVersion = 10;
@@ -104,6 +104,9 @@ private:
public:
+ // Hack for build-remote.cc.
+ PathSet locksHeld = tokenizeString<PathSet>(getEnv("NIX_HELD_LOCKS"));
+
/* Initialise the local store, upgrading the schema if
necessary. */
LocalStore(const Params & params);
diff --git a/src/libstore/local.mk b/src/libstore/local.mk
index 50c46ce6f..239356aee 100644
--- a/src/libstore/local.mk
+++ b/src/libstore/local.mk
@@ -9,6 +9,9 @@ libstore_SOURCES := $(wildcard $(d)/*.cc)
libstore_LIBS = libutil libformat
libstore_LDFLAGS = $(SQLITE3_LIBS) -lbz2 $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
+ifneq ($(OS), FreeBSD)
+ libstore_LDFLAGS += -ldl
+endif
libstore_FILES = sandbox-defaults.sb sandbox-minimal.sb sandbox-network.sb
diff --git a/src/libstore/pathlocks.cc b/src/libstore/pathlocks.cc
index 587f29598..08d1efdbe 100644
--- a/src/libstore/pathlocks.cc
+++ b/src/libstore/pathlocks.cc
@@ -113,8 +113,10 @@ bool PathLocks::lockPaths(const PathSet & _paths,
{
auto lockedPaths(lockedPaths_.lock());
- if (lockedPaths->count(lockPath))
- throw Error("deadlock: trying to re-acquire self-held lock '%s'", lockPath);
+ if (lockedPaths->count(lockPath)) {
+ if (!wait) return false;
+ throw AlreadyLocked("deadlock: trying to re-acquire self-held lock '%s'", lockPath);
+ }
lockedPaths->insert(lockPath);
}
diff --git a/src/libstore/pathlocks.hh b/src/libstore/pathlocks.hh
index 2a7de6114..db51f950a 100644
--- a/src/libstore/pathlocks.hh
+++ b/src/libstore/pathlocks.hh
@@ -2,10 +2,8 @@
#include "util.hh"
-
namespace nix {
-
/* Open (possibly create) a lock file and return the file descriptor.
-1 is returned if create is false and the lock could not be opened
because it doesn't exist. Any other error throws an exception. */
@@ -18,6 +16,7 @@ enum LockType { ltRead, ltWrite, ltNone };
bool lockFile(int fd, LockType lockType, bool wait);
+MakeError(AlreadyLocked, Error);
class PathLocks
{
@@ -38,9 +37,6 @@ public:
void setDeletion(bool deletePaths);
};
-
-// FIXME: not thread-safe!
bool pathIsLockedByMe(const Path & path);
-
}
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index 77b41b6bf..8f0b65557 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -78,9 +78,22 @@ UDSRemoteStore::UDSRemoteStore(const Params & params)
}
+UDSRemoteStore::UDSRemoteStore(std::string socket_path, const Params & params)
+ : Store(params)
+ , LocalFSStore(params)
+ , RemoteStore(params)
+ , path(socket_path)
+{
+}
+
+
std::string UDSRemoteStore::getUri()
{
- return "daemon";
+ if (path) {
+ return std::string("unix://") + *path;
+ } else {
+ return "daemon";
+ }
}
@@ -98,7 +111,7 @@ ref<RemoteStore::Connection> UDSRemoteStore::openConnection()
throw SysError("cannot create Unix domain socket");
closeOnExec(conn->fd.get());
- string socketPath = settings.nixDaemonSocketFile;
+ string socketPath = path ? *path : settings.nixDaemonSocketFile;
struct sockaddr_un addr;
addr.sun_family = AF_UNIX;
@@ -721,5 +734,14 @@ void RemoteStore::Connection::processStderr(Sink * sink, Source * source)
}
}
+static std::string uriScheme = "unix://";
+
+static RegisterStoreImplementation regStore([](
+ const std::string & uri, const Store::Params & params)
+ -> std::shared_ptr<Store>
+{
+ if (std::string(uri, 0, uriScheme.size()) != uriScheme) return 0;
+ return std::make_shared<UDSRemoteStore>(std::string(uri, uriScheme.size()), params);
+});
}
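
With the unix:// scheme registered above, a client can reach a daemon whose socket lives somewhere other than the default location. A hedged usage sketch (the socket path below is made up):

    #include "store-api.hh"

    using namespace nix;

    std::string altDaemonUri()
    {
        /* Equivalent to passing --store unix:///tmp/alt-daemon/socket on
           the command line; plain "daemon" still means the default socket. */
        ref<Store> store = openStore("unix:///tmp/alt-daemon/socket");
        return store->getUri();   // "unix:///tmp/alt-daemon/socket"
    }
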
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index 30c6beae6..7f36e2064 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -134,6 +134,7 @@ class UDSRemoteStore : public LocalFSStore, public RemoteStore
public:
UDSRemoteStore(const Params & params);
+ UDSRemoteStore(std::string path, const Params & params);
std::string getUri() override;
@@ -145,6 +146,7 @@ private:
};
ref<RemoteStore::Connection> openConnection() override;
+ std::experimental::optional<std::string> path;
};
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 77ab87ef7..4d43ef082 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -222,11 +222,10 @@ Path Store::makeTextPath(const string & name, const Hash & hash,
}
-std::pair<Path, Hash> Store::computeStorePathForPath(const Path & srcPath,
- bool recursive, HashType hashAlgo, PathFilter & filter) const
+std::pair<Path, Hash> Store::computeStorePathForPath(const string & name,
+ const Path & srcPath, bool recursive, HashType hashAlgo, PathFilter & filter) const
{
Hash h = recursive ? hashPath(hashAlgo, srcPath, filter).first : hashFile(hashAlgo, srcPath);
- string name = baseNameOf(srcPath);
Path dstPath = makeFixedOutputPath(recursive, h, name);
return std::pair<Path, Hash>(dstPath, h);
}
@@ -897,7 +896,11 @@ std::list<ref<Store>> getDefaultSubstituters()
auto addStore = [&](const std::string & uri) {
if (done.count(uri)) return;
done.insert(uri);
- stores.push_back(openStore(uri));
+ try {
+ stores.push_back(openStore(uri));
+ } catch (Error & e) {
+ printError("warning: %s", e.what());
+ }
};
for (auto uri : settings.substituters.get())
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index d1e1b5d6f..563aa566b 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -192,7 +192,7 @@ struct ValidPathInfo
typedef list<ValidPathInfo> ValidPathInfos;
-enum BuildMode { bmNormal, bmRepair, bmCheck, bmHash };
+enum BuildMode { bmNormal, bmRepair, bmCheck };
struct BuildResult
@@ -248,6 +248,8 @@ public:
const Setting<int> pathInfoCacheSize{this, 65536, "path-info-cache-size", "size of the in-memory store path information cache"};
+ const Setting<bool> isTrusted{this, false, "trusted", "whether paths from this store can be used as substitutes even when they lack trusted signatures"};
+
protected:
struct State
@@ -305,9 +307,9 @@ public:
/* This is the preparatory part of addToStore(); it computes the
store path to which srcPath is to be copied. Returns the store
path and the cryptographic hash of the contents of srcPath. */
- std::pair<Path, Hash> computeStorePathForPath(const Path & srcPath,
- bool recursive = true, HashType hashAlgo = htSHA256,
- PathFilter & filter = defaultPathFilter) const;
+ std::pair<Path, Hash> computeStorePathForPath(const string & name,
+ const Path & srcPath, bool recursive = true,
+ HashType hashAlgo = htSHA256, PathFilter & filter = defaultPathFilter) const;
/* Preparatory part of addTextToStore().
@@ -597,6 +599,11 @@ public:
"nix-cache-info" file. Lower value means higher priority. */
virtual int getPriority() { return 0; }
+ virtual Path toRealPath(const Path & storePath)
+ {
+ return storePath;
+ }
+
protected:
Stats stats;
@@ -639,9 +646,10 @@ public:
virtual Path getRealStoreDir() { return storeDir; }
- Path toRealPath(const Path & storePath)
+ Path toRealPath(const Path & storePath) override
{
- return getRealStoreDir() + "/" + baseNameOf(storePath);
+ assert(isInStore(storePath));
+ return getRealStoreDir() + "/" + std::string(storePath, storeDir.size() + 1);
}
std::shared_ptr<std::string> getBuildLog(const Path & path) override;
@@ -699,6 +707,9 @@ void removeTempRoots();
* ‘daemon’: The Nix store accessed via a Unix domain socket
connection to nix-daemon.
+ * ‘unix://<path>’: The Nix store accessed via a Unix domain socket
+ connection to nix-daemon, with the socket located at <path>.
+
* ‘auto’ or ‘’: Equivalent to ‘local’ or ‘daemon’ depending on
whether the user has write access to the local Nix
store/database.
diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc
index 5e2631ba3..ed15761b3 100644
--- a/src/libutil/compression.cc
+++ b/src/libutil/compression.cc
@@ -151,10 +151,10 @@ static ref<std::string> decompressBrotli(const std::string & in)
#endif // HAVE_BROTLI
}
-ref<std::string> compress(const std::string & method, const std::string & in)
+ref<std::string> compress(const std::string & method, const std::string & in, const bool parallel)
{
StringSink ssink;
- auto sink = makeCompressionSink(method, ssink);
+ auto sink = makeCompressionSink(method, ssink, parallel);
(*sink)(in);
sink->finish();
return ssink.s;
@@ -189,10 +189,28 @@ struct XzSink : CompressionSink
lzma_stream strm = LZMA_STREAM_INIT;
bool finished = false;
- XzSink(Sink & nextSink) : nextSink(nextSink)
+ XzSink(Sink & nextSink, const bool parallel) : nextSink(nextSink)
{
- lzma_ret ret = lzma_easy_encoder(
- &strm, 6, LZMA_CHECK_CRC64);
+ lzma_ret ret;
+ if (parallel) {
+ lzma_mt mt_options = {};
+ mt_options.flags = 0;
+ mt_options.timeout = 300; // Using the same setting as the xz cmd line
+ mt_options.preset = LZMA_PRESET_DEFAULT;
+ mt_options.filters = NULL;
+ mt_options.check = LZMA_CHECK_CRC64;
+ mt_options.threads = lzma_cputhreads();
+ mt_options.block_size = 0;
+ if (mt_options.threads == 0)
+ mt_options.threads = 1;
+ // FIXME: maybe use lzma_stream_encoder_mt_memusage() to control the
+ // number of threads.
+ ret = lzma_stream_encoder_mt(
+ &strm, &mt_options);
+ } else
+ ret = lzma_easy_encoder(
+ &strm, 6, LZMA_CHECK_CRC64);
+
if (ret != LZMA_OK)
throw CompressionError("unable to initialise lzma encoder");
// FIXME: apply the x86 BCJ filter?
@@ -449,12 +467,12 @@ struct BrotliSink : CompressionSink
};
#endif // HAVE_BROTLI
-ref<CompressionSink> makeCompressionSink(const std::string & method, Sink & nextSink)
+ref<CompressionSink> makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel)
{
if (method == "none")
return make_ref<NoneSink>(nextSink);
else if (method == "xz")
- return make_ref<XzSink>(nextSink);
+ return make_ref<XzSink>(nextSink, parallel);
else if (method == "bzip2")
return make_ref<BzipSink>(nextSink);
else if (method == "br")
diff --git a/src/libutil/compression.hh b/src/libutil/compression.hh
index e3e6f5a99..a0d7530d7 100644
--- a/src/libutil/compression.hh
+++ b/src/libutil/compression.hh
@@ -8,7 +8,7 @@
namespace nix {
-ref<std::string> compress(const std::string & method, const std::string & in);
+ref<std::string> compress(const std::string & method, const std::string & in, const bool parallel = false);
ref<std::string> decompress(const std::string & method, const std::string & in);
@@ -17,7 +17,7 @@ struct CompressionSink : BufferedSink
virtual void finish() = 0;
};
-ref<CompressionSink> makeCompressionSink(const std::string & method, Sink & nextSink);
+ref<CompressionSink> makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel = false);
MakeError(UnknownCompressionMethod, Error);
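
Only the xz sink honours the new parallel flag; the other methods ignore it, so callers can pass it through unconditionally, as BinaryCacheStore does with its parallel-compression setting. A minimal sketch against the signatures above (the wrapper function is illustrative):

    #include "compression.hh"

    using namespace nix;

    ref<std::string> compressNar(const std::string & nar, bool parallel)
    {
        /* Multi-threaded only when the method is "xz" and parallel is true;
           otherwise behaves exactly as before. */
        return compress("xz", nar, parallel);
    }
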
diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc
index 6924e0080..27a631a37 100644
--- a/src/libutil/logging.cc
+++ b/src/libutil/logging.cc
@@ -44,7 +44,7 @@ public:
prefix = std::string("<") + c + ">";
}
- writeToStderr(prefix + (tty ? fs.s : filterANSIEscapes(fs.s)) + "\n");
+ writeToStderr(prefix + filterANSIEscapes(fs.s) + "\n");
}
void startActivity(ActivityId act, Verbosity lvl, ActivityType type,
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index 197df0c44..f7a12d21b 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -192,6 +192,12 @@ bool isInDir(const Path & path, const Path & dir)
}
+bool isDirOrInDir(const Path & path, const Path & dir)
+{
+ return path == dir or isInDir(path, dir);
+}
+
+
struct stat lstat(const Path & path)
{
struct stat st;
@@ -1172,36 +1178,51 @@ void ignoreException()
}
-string filterANSIEscapes(const string & s, bool nixOnly)
+std::string filterANSIEscapes(const std::string & s, unsigned int width)
{
- string t, r;
- enum { stTop, stEscape, stCSI } state = stTop;
- for (auto c : s) {
- if (state == stTop) {
- if (c == '\e') {
- state = stEscape;
- r = c;
- } else
- t += c;
- } else if (state == stEscape) {
- r += c;
- if (c == '[')
- state = stCSI;
- else {
- t += r;
- state = stTop;
+ std::string t, e;
+ size_t w = 0;
+ auto i = s.begin();
+
+ while (w < (size_t) width && i != s.end()) {
+
+ if (*i == '\e') {
+ std::string e;
+ e += *i++;
+ char last = 0;
+
+ if (i != s.end() && *i == '[') {
+ e += *i++;
+ // eat parameter bytes
+ while (i != s.end() && *i >= 0x30 && *i <= 0x3f) e += *i++;
+ // eat intermediate bytes
+ while (i != s.end() && *i >= 0x20 && *i <= 0x2f) e += *i++;
+ // eat final byte
+ if (i != s.end() && *i >= 0x40 && *i <= 0x7e) e += last = *i++;
+ } else {
+ if (i != s.end() && *i >= 0x40 && *i <= 0x5f) e += *i++;
}
- } else {
- r += c;
- if (c >= 0x40 && c <= 0x7e) {
- if (nixOnly && (c != 'p' && c != 'q' && c != 's' && c != 'a' && c != 'b'))
- t += r;
- state = stTop;
- r.clear();
+
+ if (last == 'm')
+ t += e;
+ }
+
+ else if (*i == '\t') {
+ i++; t += ' '; w++;
+ while (w < (size_t) width && w % 8) {
+ t += ' '; w++;
}
}
+
+ else if (*i == '\r')
+ // do nothing for now
+ ;
+
+ else {
+ t += *i++; w++;
+ }
}
- t += r;
+
return t;
}
diff --git a/src/libutil/util.hh b/src/libutil/util.hh
index a3494e09b..47e02bc89 100644
--- a/src/libutil/util.hh
+++ b/src/libutil/util.hh
@@ -53,10 +53,12 @@ Path dirOf(const Path & path);
following the final `/'. */
string baseNameOf(const Path & path);
-/* Check whether a given path is a descendant of the given
- directory. */
+/* Check whether 'path' is a descendant of 'dir'. */
bool isInDir(const Path & path, const Path & dir);
+/* Check whether 'path' is equal to 'dir' or a descendant of 'dir'. */
+bool isDirOrInDir(const Path & path, const Path & dir);
+
/* Get status of `path'. */
struct stat lstat(const Path & path);
@@ -386,10 +388,12 @@ void ignoreException();
#define ANSI_BLUE "\e[34;1m"
-/* Filter out ANSI escape codes from the given string. If ‘nixOnly’ is
- set, only filter escape codes generated by Nixpkgs' stdenv (used to
- denote nesting etc.). */
-string filterANSIEscapes(const string & s, bool nixOnly = false);
+/* Truncate a string to 'width' printable characters. Certain ANSI
+ escape sequences (such as colour setting) are copied but not
+ included in the character count. Other ANSI escape sequences are
+ filtered. Also, tabs are expanded to spaces. */
+std::string filterANSIEscapes(const std::string & s,
+ unsigned int width = std::numeric_limits<unsigned int>::max());
/* Base64 encoding/decoding. */
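
Concretely, the new semantics mean colour-setting sequences survive truncation without counting towards the width, other escapes are dropped, and tabs expand to 8-column stops. An illustrative check (expected values follow from the implementation above, not from a test included in this change):

    #include "util.hh"
    #include <cassert>

    using namespace nix;

    void filterANSIEscapesExamples()
    {
        /* The colour escape is copied but does not count towards width 5. */
        assert(filterANSIEscapes("\e[31mhello world", 5) == "\e[31mhello");

        /* The tab after 'a' expands to the next multiple of 8 columns,
           so "a", seven spaces and "b" make 9 printable characters. */
        assert(filterANSIEscapes("a\tb", 16).size() == 9);
    }
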
diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc
index 58366daa6..99f773451 100755
--- a/src/nix-build/nix-build.cc
+++ b/src/nix-build/nix-build.cc
@@ -141,7 +141,7 @@ void mainWrapped(int argc, char * * argv)
else if (*arg == "--version")
printVersion(myName);
- else if (*arg == "--add-drv-link")
+ else if (*arg == "--add-drv-link" || *arg == "--indirect")
; // obsolete
else if (*arg == "--no-out-link" || *arg == "--no-link")
@@ -167,9 +167,6 @@ void mainWrapped(int argc, char * * argv)
buildMode = bmRepair;
}
- else if (*arg == "--hash")
- buildMode = bmHash;
-
else if (*arg == "--run-env") // obsolete
runEnv = true;
@@ -235,6 +232,8 @@ void mainWrapped(int argc, char * * argv)
myArgs.parseCmdline(args);
+ initPlugins();
+
if (packages && fromArgs)
throw UsageError("'-p' and '-E' are mutually exclusive");
@@ -279,8 +278,8 @@ void mainWrapped(int argc, char * * argv)
else
/* If we're in a #! script, interpret filenames
relative to the script. */
- exprs.push_back(state.parseExprFromFile(resolveExprPath(lookupFileArg(state,
- inShebang && !packages ? absPath(i, absPath(dirOf(script))) : i))));
+ exprs.push_back(state.parseExprFromFile(resolveExprPath(state.checkSourcePath(lookupFileArg(state,
+ inShebang && !packages ? absPath(i, absPath(dirOf(script))) : i)))));
}
/* Evaluate them into derivations. */
diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc
index 370f216ab..ec9a7174e 100755
--- a/src/nix-channel/nix-channel.cc
+++ b/src/nix-channel/nix-channel.cc
@@ -213,6 +213,9 @@ int main(int argc, char ** argv)
}
return true;
});
+
+ initPlugins();
+
switch (cmd) {
case cNone:
throw UsageError("no command specified");
diff --git a/src/nix-collect-garbage/nix-collect-garbage.cc b/src/nix-collect-garbage/nix-collect-garbage.cc
index cc663a969..37fe22f48 100644
--- a/src/nix-collect-garbage/nix-collect-garbage.cc
+++ b/src/nix-collect-garbage/nix-collect-garbage.cc
@@ -77,6 +77,8 @@ int main(int argc, char * * argv)
return true;
});
+ initPlugins();
+
auto profilesDir = settings.nixStateDir + "/profiles";
if (removeOld) removeOldGenerations(profilesDir);
diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc
index 861fc2e5c..dfb1b8fc5 100755
--- a/src/nix-copy-closure/nix-copy-closure.cc
+++ b/src/nix-copy-closure/nix-copy-closure.cc
@@ -44,6 +44,8 @@ int main(int argc, char ** argv)
return true;
});
+ initPlugins();
+
if (sshHost.empty())
throw UsageError("no host name specified");
diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc
index 5629cc64b..890bffa19 100644
--- a/src/nix-daemon/nix-daemon.cc
+++ b/src/nix-daemon/nix-daemon.cc
@@ -411,7 +411,7 @@ static void performOp(TunnelLogger * logger, ref<LocalStore> store,
/* Repairing is not atomic, so disallowed for "untrusted"
clients. */
if (mode == bmRepair && !trusted)
- throw Error("repairing is not supported when building through the Nix daemon");
+ throw Error("repairing is not allowed because you are not in 'trusted-users'");
}
logger->startWork();
store->buildPaths(drvs, mode);
@@ -994,7 +994,7 @@ static void daemonLoop(char * * argv)
if (matchUser(user, group, trustedUsers))
trusted = true;
- if (!trusted && !matchUser(user, group, allowedUsers))
+ if ((!trusted && !matchUser(user, group, allowedUsers)) || group == settings.buildUsersGroup)
throw Error(format("user '%1%' is not allowed to connect to the Nix daemon") % user);
printInfo(format((string) "accepted connection from pid %1%, user %2%" + (trusted ? " (trusted)" : ""))
@@ -1060,6 +1060,8 @@ int main(int argc, char * * argv)
return true;
});
+ initPlugins();
+
if (stdio) {
if (getStoreType() == tDaemon) {
/* Forward on this connection to the real daemon */
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index 016caf6d2..97e66cbd9 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -1393,6 +1393,8 @@ int main(int argc, char * * argv)
myArgs.parseCmdline(argvToStrings(argc, argv));
+ initPlugins();
+
if (!op) throw UsageError("no operation specified");
auto store = openStore();
diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc
index 55ac007e8..dd262bea0 100644
--- a/src/nix-instantiate/nix-instantiate.cc
+++ b/src/nix-instantiate/nix-instantiate.cc
@@ -151,6 +151,8 @@ int main(int argc, char * * argv)
myArgs.parseCmdline(argvToStrings(argc, argv));
+ initPlugins();
+
if (evalOnly && !wantsReadWrite)
settings.readOnlyMode = true;
@@ -182,7 +184,7 @@ int main(int argc, char * * argv)
for (auto & i : files) {
Expr * e = fromArgs
? state.parseExprFromString(i, absPath("."))
- : state.parseExprFromFile(resolveExprPath(lookupFileArg(state, i)));
+ : state.parseExprFromFile(resolveExprPath(state.checkSourcePath(lookupFileArg(state, i))));
processExpr(state, attrPaths, parseOnly, strict, autoArgs,
evalOnly, outputKind, xmlOutputSourceLocation, e);
}
diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc
index fef3eaa45..fa7ee2545 100644
--- a/src/nix-prefetch-url/nix-prefetch-url.cc
+++ b/src/nix-prefetch-url/nix-prefetch-url.cc
@@ -89,6 +89,8 @@ int main(int argc, char * * argv)
myArgs.parseCmdline(argvToStrings(argc, argv));
+ initPlugins();
+
if (args.size() > 2)
throw UsageError("too many arguments");
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index f6f276dd1..efef7f15c 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -122,7 +122,6 @@ static void opRealise(Strings opFlags, Strings opArgs)
if (i == "--dry-run") dryRun = true;
else if (i == "--repair") buildMode = bmRepair;
else if (i == "--check") buildMode = bmCheck;
- else if (i == "--hash") buildMode = bmHash;
else if (i == "--ignore-unknown") ignoreUnknown = true;
else throw UsageError(format("unknown flag '%1%'") % i);
@@ -1053,6 +1052,8 @@ int main(int argc, char * * argv)
return true;
});
+ initPlugins();
+
if (!op) throw UsageError("no operation specified");
if (op != opDump && op != opRestore) /* !!! hack */
diff --git a/src/nix/build.cc b/src/nix/build.cc
index f7c99f12d..b4f21b32d 100644
--- a/src/nix/build.cc
+++ b/src/nix/build.cc
@@ -50,7 +50,7 @@ struct CmdBuild : MixDryRun, InstallablesCommand
void run(ref<Store> store) override
{
- auto buildables = toBuildables(store, dryRun ? DryRun : Build, installables);
+ auto buildables = build(store, dryRun ? DryRun : Build, installables);
for (size_t i = 0; i < buildables.size(); ++i) {
auto & b(buildables[i]);
diff --git a/src/nix/command.hh b/src/nix/command.hh
index 6b34e3881..97a6fee7f 100644
--- a/src/nix/command.hh
+++ b/src/nix/command.hh
@@ -5,6 +5,8 @@
namespace nix {
+extern std::string programPath;
+
struct Value;
class Bindings;
class EvalState;
@@ -196,7 +198,7 @@ std::shared_ptr<Installable> parseInstallable(
SourceExprCommand & cmd, ref<Store> store, const std::string & installable,
bool useDefaultInstallables);
-Buildables toBuildables(ref<Store> store, RealiseMode mode,
+Buildables build(ref<Store> store, RealiseMode mode,
std::vector<std::shared_ptr<Installable>> installables);
PathSet toStorePaths(ref<Store> store, RealiseMode mode,
diff --git a/src/nix/eval.cc b/src/nix/eval.cc
index 0fbeca1c1..b7058361c 100644
--- a/src/nix/eval.cc
+++ b/src/nix/eval.cc
@@ -5,10 +5,11 @@
#include "eval.hh"
#include "json.hh"
#include "value-to-json.hh"
+#include "progress-bar.hh"
using namespace nix;
-struct CmdEval : MixJSON, InstallablesCommand
+struct CmdEval : MixJSON, InstallableCommand
{
bool raw = false;
@@ -56,20 +57,19 @@ struct CmdEval : MixJSON, InstallablesCommand
auto state = getEvalState();
- auto jsonOut = json ? std::make_unique<JSONList>(std::cout) : nullptr;
+ auto v = installable->toValue(*state);
+ PathSet context;
- for (auto & i : installables) {
- auto v = i->toValue(*state);
- PathSet context;
- if (raw) {
- std::cout << state->coerceToString(noPos, *v, context);
- } else if (json) {
- auto jsonElem = jsonOut->placeholder();
- printValueAsJSON(*state, true, *v, jsonElem, context);
- } else {
- state->forceValueDeep(*v);
- std::cout << *v << "\n";
- }
+ stopProgressBar();
+
+ if (raw) {
+ std::cout << state->coerceToString(noPos, *v, context);
+ } else if (json) {
+ JSONPlaceholder jsonOut(std::cout);
+ printValueAsJSON(*state, true, *v, jsonOut, context);
+ } else {
+ state->forceValueDeep(*v);
+ std::cout << *v << "\n";
}
}
};
diff --git a/src/nix/installables.cc b/src/nix/installables.cc
index ae93c4ef6..a3fdd8a28 100644
--- a/src/nix/installables.cc
+++ b/src/nix/installables.cc
@@ -30,10 +30,8 @@ Value * SourceExprCommand::getSourceExpr(EvalState & state)
vSourceExpr = state.allocValue();
- if (file != "") {
- Expr * e = state.parseExprFromFile(resolveExprPath(lookupFileArg(state, file)));
- state.eval(e, *vSourceExpr);
- }
+ if (file != "")
+ state.evalFile(lookupFileArg(state, file), *vSourceExpr);
else {
@@ -255,7 +253,7 @@ std::shared_ptr<Installable> parseInstallable(
return installables.front();
}
-Buildables toBuildables(ref<Store> store, RealiseMode mode,
+Buildables build(ref<Store> store, RealiseMode mode,
std::vector<std::shared_ptr<Installable>> installables)
{
if (mode != Build)
@@ -293,7 +291,7 @@ PathSet toStorePaths(ref<Store> store, RealiseMode mode,
{
PathSet outPaths;
- for (auto & b : toBuildables(store, mode, installables))
+ for (auto & b : build(store, mode, installables))
for (auto & output : b.outputs)
outPaths.insert(output.second);
diff --git a/src/nix/local.mk b/src/nix/local.mk
index bddd53b16..f76da1944 100644
--- a/src/nix/local.mk
+++ b/src/nix/local.mk
@@ -6,4 +6,6 @@ nix_SOURCES := $(wildcard $(d)/*.cc) $(wildcard src/linenoise/*.cpp)
nix_LIBS = libexpr libmain libstore libutil libformat
+nix_LDFLAGS = -pthread
+
$(eval $(call install-symlink, nix, $(bindir)/nix-hash))
diff --git a/src/nix/log.cc b/src/nix/log.cc
index 966ad8b65..f07ec4e93 100644
--- a/src/nix/log.cc
+++ b/src/nix/log.cc
@@ -50,6 +50,7 @@ struct CmdLog : InstallableCommand
auto b = installable->toBuildable();
+ RunPager pager;
for (auto & sub : subs) {
auto log = b.drvPath != "" ? sub->getBuildLog(b.drvPath) : nullptr;
for (auto & output : b.outputs) {
diff --git a/src/nix/main.cc b/src/nix/main.cc
index 06bb8a1c3..bb107ec7d 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -16,6 +16,8 @@ void chrootHelper(int argc, char * * argv);
namespace nix {
+std::string programPath;
+
struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
{
NixArgs() : MultiCommand(*RegisterCommand::commands), MixCommonArgs("nix")
@@ -78,7 +80,8 @@ void mainWrapped(int argc, char * * argv)
initNix();
initGC();
- string programName = baseNameOf(argv[0]);
+ programPath = argv[0];
+ string programName = baseNameOf(programPath);
{
auto legacy = (*RegisterLegacyCommand::commands)[programName];
@@ -89,6 +92,8 @@ void mainWrapped(int argc, char * * argv)
args.parseCmdline(argvToStrings(argc, argv));
+ initPlugins();
+
if (!args.command) args.showHelpAndExit();
Finally f([]() { stopProgressBar(); });
diff --git a/src/nix/progress-bar.cc b/src/nix/progress-bar.cc
index fb9955190..e6553c06f 100644
--- a/src/nix/progress-bar.cc
+++ b/src/nix/progress-bar.cc
@@ -3,8 +3,9 @@
#include "sync.hh"
#include "store-api.hh"
-#include <map>
#include <atomic>
+#include <map>
+#include <thread>
namespace nix {
@@ -22,44 +23,6 @@ static uint64_t getI(const std::vector<Logger::Field> & fields, size_t n)
return fields[n].i;
}
-/* Truncate a string to 'width' printable characters. ANSI escape
- sequences are copied but not included in the character count. Also,
- tabs are expanded to spaces. */
-static std::string ansiTruncate(const std::string & s, int width)
-{
- if (width <= 0) return s;
-
- std::string t;
- size_t w = 0;
- auto i = s.begin();
-
- while (w < (size_t) width && i != s.end()) {
- if (*i == '\e') {
- t += *i++;
- if (i != s.end() && *i == '[') {
- t += *i++;
- while (i != s.end() && (*i < 0x40 || *i > 0x7e)) {
- t += *i++;
- }
- if (i != s.end()) t += *i++;
- }
- }
-
- else if (*i == '\t') {
- t += ' '; w++;
- while (w < (size_t) width && w & 8) {
- t += ' '; w++;
- }
- }
-
- else {
- t += *i++; w++;
- }
- }
-
- return t;
-}
-
class ProgressBar : public Logger
{
private:
@@ -101,15 +64,28 @@ private:
Sync<State> state_;
+ std::thread updateThread;
+
+ std::condition_variable quitCV, updateCV;
+
public:
ProgressBar()
{
+ updateThread = std::thread([&]() {
+ auto state(state_.lock());
+ while (state->active) {
+ state.wait(updateCV);
+ draw(*state);
+ state.wait_for(quitCV, std::chrono::milliseconds(50));
+ }
+ });
}
~ProgressBar()
{
stop();
+ updateThread.join();
}
void stop()
@@ -121,6 +97,8 @@ public:
writeToStderr("\r\e[K");
if (status != "")
writeToStderr("[" + status + "]\n");
+ updateCV.notify_one();
+ quitCV.notify_one();
}
void log(Verbosity lvl, const FormatOrString & fs) override
@@ -132,7 +110,7 @@ public:
void log(State & state, Verbosity lvl, const std::string & s)
{
writeToStderr("\r\e[K" + s + ANSI_NORMAL "\n");
- update(state);
+ draw(state);
}
void startActivity(ActivityId act, Verbosity lvl, ActivityType type,
@@ -167,7 +145,12 @@ public:
if (type == actSubstitute) {
auto name = storePathToName(getS(fields, 0));
- i->s = fmt("fetching " ANSI_BOLD "%s" ANSI_NORMAL " from %s", name, getS(fields, 1));
+ auto sub = getS(fields, 1);
+ i->s = fmt(
+ hasPrefix(sub, "local")
+ ? "copying " ANSI_BOLD "%s" ANSI_NORMAL " from %s"
+ : "fetching " ANSI_BOLD "%s" ANSI_NORMAL " from %s",
+ name, sub);
}
if (type == actQueryPathInfo) {
@@ -180,7 +163,7 @@ public:
|| (type == actCopyPath && hasAncestor(*state, actSubstitute, parent)))
i->visible = false;
- update(*state);
+ update();
}
/* Check whether an activity has an ancestor with the specified
@@ -215,7 +198,7 @@ public:
state->its.erase(i);
}
- update(*state);
+ update();
}
void result(ActivityId act, ResultType type, const std::vector<Field> & fields) override
@@ -225,7 +208,7 @@ public:
if (type == resFileLinked) {
state->filesLinked++;
state->bytesLinked += getI(fields, 0);
- update(*state);
+ update();
}
else if (type == resBuildLogLine) {
@@ -238,25 +221,25 @@ public:
info.lastLine = lastLine;
state->activities.emplace_back(info);
i->second = std::prev(state->activities.end());
- update(*state);
+ update();
}
}
else if (type == resUntrustedPath) {
state->untrustedPaths++;
- update(*state);
+ update();
}
else if (type == resCorruptedPath) {
state->corruptedPaths++;
- update(*state);
+ update();
}
else if (type == resSetPhase) {
auto i = state->its.find(act);
assert(i != state->its.end());
i->second->phase = getS(fields, 0);
- update(*state);
+ update();
}
else if (type == resProgress) {
@@ -267,7 +250,7 @@ public:
actInfo.expected = getI(fields, 1);
actInfo.running = getI(fields, 2);
actInfo.failed = getI(fields, 3);
- update(*state);
+ update();
}
else if (type == resSetExpected) {
@@ -279,17 +262,16 @@ public:
state->activitiesByType[type].expected -= j;
j = getI(fields, 1);
state->activitiesByType[type].expected += j;
- update(*state);
+ update();
}
}
void update()
{
- auto state(state_.lock());
- update(*state);
+ updateCV.notify_one();
}
- void update(State & state)
+ void draw(State & state)
{
if (!state.active) return;
@@ -323,7 +305,10 @@ public:
}
}
- writeToStderr("\r" + ansiTruncate(line, getWindowSize().second) + "\e[K");
+ auto width = getWindowSize().second;
+ if (width <= 0) width = std::numeric_limits<decltype(width)>::max();
+
+ writeToStderr("\r" + filterANSIEscapes(line, width) + "\e[K");
}
std::string getStatus(State & state)
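
The redraw thread introduced here turns update() into a cheap notification and limits actual drawing to roughly one frame per 50 ms, however many log events arrive. A generic sketch of that throttling pattern with plain standard-library primitives (the real code routes the state through nix's Sync wrapper; this class is illustrative):

    #include <chrono>
    #include <condition_variable>
    #include <mutex>
    #include <thread>

    class ThrottledRedraw
    {
        std::mutex m;
        std::condition_variable updateCV, quitCV;
        bool active = true;

        std::thread worker{[this] {
            std::unique_lock<std::mutex> lock(m);
            while (active) {
                updateCV.wait(lock);       // sleep until an update arrives
                if (!active) break;
                draw();                    // one draw per burst of updates
                quitCV.wait_for(lock, std::chrono::milliseconds(50)); // rate limit
            }
        }};

        void draw() { /* render one status line */ }

    public:
        void update() { updateCV.notify_one(); }   // cheap, callable from any thread

        void stop()
        {
            { std::lock_guard<std::mutex> lock(m); active = false; }
            updateCV.notify_one();
            quitCV.notify_one();
            if (worker.joinable()) worker.join();
        }

        ~ThrottledRedraw() { if (worker.joinable()) stop(); }
    };
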
diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc
new file mode 100644
index 000000000..758bbbc68
--- /dev/null
+++ b/src/nix/upgrade-nix.cc
@@ -0,0 +1,131 @@
+#include "command.hh"
+#include "store-api.hh"
+#include "download.hh"
+#include "eval.hh"
+#include "attr-path.hh"
+
+using namespace nix;
+
+struct CmdUpgradeNix : StoreCommand
+{
+ Path profileDir;
+
+ CmdUpgradeNix()
+ {
+ mkFlag()
+ .longName("profile")
+ .shortName('p')
+ .labels({"profile-dir"})
+ .description("the Nix profile to upgrade")
+ .dest(&profileDir);
+ }
+
+ std::string name() override
+ {
+ return "upgrade-nix";
+ }
+
+ std::string description() override
+ {
+ return "upgrade Nix to the latest stable version";
+ }
+
+ Examples examples() override
+ {
+ return {
+ Example{
+ "To upgrade Nix to the latest stable version:",
+ "nix upgrade-nix"
+ },
+ Example{
+ "To upgrade Nix in a specific profile:",
+ "nix upgrade-nix -p /nix/var/nix/profiles/per-user/alice/profile"
+ },
+ };
+ }
+
+ void run(ref<Store> store) override
+ {
+ settings.pureEval = true;
+
+ if (profileDir == "")
+ profileDir = getProfileDir(store);
+
+ printInfo("upgrading Nix in profile '%s'", profileDir);
+
+ Path storePath;
+ {
+ Activity act(*logger, lvlInfo, actUnknown, "querying latest Nix version");
+ storePath = getLatestNix(store);
+ }
+
+ {
+ Activity act(*logger, lvlInfo, actUnknown, fmt("downloading '%s'...", storePath));
+ store->ensurePath(storePath);
+ }
+
+ {
+ Activity act(*logger, lvlInfo, actUnknown, fmt("verifying that '%s' works...", storePath));
+ auto program = storePath + "/bin/nix-env";
+ auto s = runProgram(program, false, {"--version"});
+ if (s.find("Nix") == std::string::npos)
+ throw Error("could not verify that '%s' works", program);
+ }
+
+ {
+ Activity act(*logger, lvlInfo, actUnknown, fmt("installing '%s' into profile '%s'...", storePath, profileDir));
+ runProgram(settings.nixBinDir + "/nix-env", false,
+ {"--profile", profileDir, "-i", storePath, "--no-sandbox"});
+ }
+ }
+
+ /* Return the profile in which Nix is installed. */
+ Path getProfileDir(ref<Store> store)
+ {
+ Path where;
+
+ for (auto & dir : tokenizeString<Strings>(getEnv("PATH"), ":"))
+ if (pathExists(dir + "/nix-env")) {
+ where = dir;
+ break;
+ }
+
+ if (where == "")
+ throw Error("couldn't figure out how Nix is installed, so I can't upgrade it");
+
+ printInfo("found Nix in '%s'", where);
+
+ if (hasPrefix(where, "/run/current-system"))
+ throw Error("Nix on NixOS must be upgraded via 'nixos-rebuild'");
+
+ Path profileDir;
+ Path userEnv;
+
+ if (baseNameOf(where) != "bin" ||
+ !hasSuffix(userEnv = canonPath(profileDir = dirOf(where), true), "user-environment"))
+ throw Error("directory '%s' does not appear to be part of a Nix profile", where);
+
+ if (!store->isValidPath(userEnv))
+ throw Error("directory '%s' is not in the Nix store", userEnv);
+
+ return profileDir;
+ }
+
+ /* Return the store path of the latest stable Nix. */
+ Path getLatestNix(ref<Store> store)
+ {
+ // FIXME: use nixos.org?
+ auto req = DownloadRequest("https://github.com/NixOS/nixpkgs/raw/master/nixos/modules/installer/tools/nix-fallback-paths.nix");
+ auto res = getDownloader()->download(req);
+
+ EvalState state(Strings(), store);
+ auto v = state.allocValue();
+ state.eval(state.parseExprFromString(*res.data, "/no-such-path"), *v);
+ Bindings & bindings(*state.allocBindings(0));
+ auto v2 = findAlongAttrPath(state, settings.thisSystem, bindings, *v);
+
+ return state.forceString(*v2);
+ }
+};
+
+static RegisterCommand r1(make_ref<CmdUpgradeNix>());
diff --git a/src/nlohmann/json.hpp b/src/nlohmann/json.hpp
index 9754e464c..5b0b0ea5b 100644
--- a/src/nlohmann/json.hpp
+++ b/src/nlohmann/json.hpp
@@ -1,7 +1,7 @@
/*
__ _____ _____ _____
__| | __| | | | JSON for Modern C++
-| | |__ | | | | | | version 2.1.1
+| | |__ | | | | | | version 3.0.1
|_____|_____|_____|_|___| https://github.com/nlohmann/json
Licensed under the MIT License <http://opensource.org/licenses/MIT>.
@@ -109,7 +109,7 @@ SOFTWARE.
#define JSON_UNLIKELY(x) x
#endif
-// cpp language standard detection
+// C++ language standard detection
#if (defined(__cplusplus) && __cplusplus >= 201703L) || (defined(_HAS_CXX17) && _HAS_CXX17 == 1) // fix for issue #464
#define JSON_HAS_CPP_17
#define JSON_HAS_CPP_14
@@ -128,20 +128,18 @@ template<typename = void, typename = void>
struct adl_serializer;
// forward declaration of basic_json (required to split the class)
-template<template<typename U, typename V, typename... Args> class ObjectType =
- std::map,
- template<typename U, typename... Args> class ArrayType = std::vector,
+template<template<typename, typename, typename...> class ObjectType = std::map,
+ template<typename, typename...> class ArrayType = std::vector,
class StringType = std::string, class BooleanType = bool,
class NumberIntegerType = std::int64_t,
class NumberUnsignedType = std::uint64_t,
class NumberFloatType = double,
- template<typename U> class AllocatorType = std::allocator,
- template<typename T, typename SFINAE = void> class JSONSerializer =
- adl_serializer>
+ template<typename> class AllocatorType = std::allocator,
+ template<typename, typename = void> class JSONSerializer = adl_serializer>
class basic_json;
-// Ugly macros to avoid uglier copy-paste when specializing basic_json
-// This is only temporary and will be removed in 3.0
+// Ugly macros to avoid uglier copy-paste when specializing basic_json. They
+// may be removed in the future once the class is split.
#define NLOHMANN_BASIC_JSON_TPL_DECLARATION \
template<template<typename, typename, typename...> class ObjectType, \
@@ -227,7 +225,7 @@ class exception : public std::exception
/*!
@brief exception indicating a parse error
-This excpetion is thrown by the library when a parse error occurs. Parse errors
+This exception is thrown by the library when a parse error occurs. Parse errors
can occur during the deserialization of JSON text, CBOR, MessagePack, as well
as when using JSON Patch.
@@ -243,12 +241,12 @@ json.exception.parse_error.102 | parse error at 14: missing or wrong low surroga
json.exception.parse_error.103 | parse error: code points above 0x10FFFF are invalid | Unicode supports code points up to 0x10FFFF. Code points above 0x10FFFF are invalid.
json.exception.parse_error.104 | parse error: JSON patch must be an array of objects | [RFC 6902](https://tools.ietf.org/html/rfc6902) requires a JSON Patch document to be a JSON document that represents an array of objects.
json.exception.parse_error.105 | parse error: operation must have string member 'op' | An operation of a JSON Patch document must contain exactly one "op" member, whose value indicates the operation to perform. Its value must be one of "add", "remove", "replace", "move", "copy", or "test"; other values are errors.
-json.exception.parse_error.106 | parse error: array index '01' must not begin with '0' | An array index in a JSON Pointer ([RFC 6901](https://tools.ietf.org/html/rfc6901)) may be `0` or any number wihtout a leading `0`.
+json.exception.parse_error.106 | parse error: array index '01' must not begin with '0' | An array index in a JSON Pointer ([RFC 6901](https://tools.ietf.org/html/rfc6901)) may be `0` or any number without a leading `0`.
json.exception.parse_error.107 | parse error: JSON pointer must be empty or begin with '/' - was: 'foo' | A JSON Pointer must be a Unicode string containing a sequence of zero or more reference tokens, each prefixed by a `/` character.
json.exception.parse_error.108 | parse error: escape character '~' must be followed with '0' or '1' | In a JSON Pointer, only `~0` and `~1` are valid escape sequences.
json.exception.parse_error.109 | parse error: array index 'one' is not a number | A JSON Pointer array index must be a number.
json.exception.parse_error.110 | parse error at 1: cannot read 2 bytes from vector | When parsing CBOR or MessagePack, the byte vector ends before the complete value has been read.
-json.exception.parse_error.112 | parse error at 1: error reading CBOR; last byte: 0xf8 | Not all types of CBOR or MessagePack are supported. This exception occurs if an unsupported byte was read.
+json.exception.parse_error.112 | parse error at 1: error reading CBOR; last byte: 0xF8 | Not all types of CBOR or MessagePack are supported. This exception occurs if an unsupported byte was read.
json.exception.parse_error.113 | parse error at 2: expected a CBOR string; last byte: 0x98 | While parsing a map key, a value that is not a string has been read.
@note For an input with n bytes, 1 is the index of the first character and n+1
@@ -378,6 +376,7 @@ json.exception.type_error.312 | cannot use update() with string | The @ref updat
json.exception.type_error.313 | invalid value to unflatten | The @ref unflatten function converts an object whose keys are JSON Pointers back into an arbitrary nested JSON value. The JSON Pointers must not overlap, because then the resulting value would not be well defined.
json.exception.type_error.314 | only objects can be unflattened | The @ref unflatten function only works for an object whose keys are JSON Pointers.
json.exception.type_error.315 | values in object must be primitive | The @ref unflatten function only works for an object whose keys are JSON Pointers and whose values are primitive.
+json.exception.type_error.316 | invalid UTF-8 byte at index 10: 0x7E | The @ref dump function only works with UTF-8 encoded strings; that is, if you assign a `std::string` to a JSON value, make sure it is UTF-8 encoded. |
@liveexample{The following code shows how a `type_error` exception can be
caught.,type_error}
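
Illustrative aside (not part of the diff): the new type_error.316 row corresponds to the stricter UTF-8 handling this upgrade adds to dump() (see the throw_if_invalid_utf8 call introduced further down in this file). A minimal sketch of how it surfaces, assuming the bundled header is reachable as <nlohmann/json.hpp>:

    #include <nlohmann/json.hpp>
    #include <iostream>

    int main()
    {
        nlohmann::json j;
        j["text"] = "\xF0\x28\x8C\x28";          // not valid UTF-8
        try {
            std::cout << j.dump() << std::endl;
        } catch (const nlohmann::json::type_error& e) {
            std::cerr << e.what() << std::endl;  // [json.exception.type_error.316] ...
        }
        return 0;
    }
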
@@ -457,7 +456,6 @@ Exceptions have ids 5xx.
name / id | example message | description
------------------------------ | --------------- | -------------------------
json.exception.other_error.501 | unsuccessful: {"op":"test","path":"/baz", "value":"bar"} | A JSON Patch operation 'test' failed. The unsuccessful operation is also printed.
-json.exception.other_error.502 | invalid object size for conversion | Some conversions to user-defined types impose constraints on the object size (e.g. std::pair)
@sa @ref exception for the base class of the library exceptions
@sa @ref parse_error for exceptions indicating a parse error
@@ -540,20 +538,14 @@ Returns an ordering that is similar to Python:
inline bool operator<(const value_t lhs, const value_t rhs) noexcept
{
static constexpr std::array<uint8_t, 8> order = {{
- 0, // null
- 3, // object
- 4, // array
- 5, // string
- 1, // boolean
- 2, // integer
- 2, // unsigned
- 2, // float
+ 0 /* null */, 3 /* object */, 4 /* array */, 5 /* string */,
+ 1 /* boolean */, 2 /* integer */, 2 /* unsigned */, 2 /* float */
}
};
const auto l_index = static_cast<std::size_t>(lhs);
const auto r_index = static_cast<std::size_t>(rhs);
- return (l_index < order.size() and r_index < order.size() and order[l_index] < order[r_index]);
+ return l_index < order.size() and r_index < order.size() and order[l_index] < order[r_index];
}
@@ -591,17 +583,15 @@ struct merge_and_renumber;
template<std::size_t... I1, std::size_t... I2>
struct merge_and_renumber<index_sequence<I1...>, index_sequence<I2...>>
- : index_sequence < I1..., (sizeof...(I1) + I2)... >
- {};
+ : index_sequence < I1..., (sizeof...(I1) + I2)... > {};
template<std::size_t N>
struct make_index_sequence
: merge_and_renumber < typename make_index_sequence < N / 2 >::type,
- typename make_index_sequence < N - N / 2 >::type >
-{};
+ typename make_index_sequence < N - N / 2 >::type > {};
-template<> struct make_index_sequence<0> : index_sequence<> { };
-template<> struct make_index_sequence<1> : index_sequence<0> { };
+template<> struct make_index_sequence<0> : index_sequence<> {};
+template<> struct make_index_sequence<1> : index_sequence<0> {};
template<typename... Ts>
using index_sequence_for = make_index_sequence<sizeof...(Ts)>;
@@ -624,7 +614,7 @@ template<class B1> struct conjunction<B1> : B1 {};
template<class B1, class... Bn>
struct conjunction<B1, Bn...> : std::conditional<bool(B1::value), conjunction<Bn...>, B1>::type {};
-template<class B> struct negation : std::integral_constant < bool, !B::value > {};
+template<class B> struct negation : std::integral_constant<bool, not B::value> {};
// dispatch utility (taken from ranges-v3)
template<unsigned N> struct priority_tag : priority_tag < N - 1 > {};
@@ -725,8 +715,7 @@ struct external_constructor<value_t::array>
}
template<typename BasicJsonType, typename CompatibleArrayType,
- enable_if_t<not std::is_same<CompatibleArrayType,
- typename BasicJsonType::array_t>::value,
+ enable_if_t<not std::is_same<CompatibleArrayType, typename BasicJsonType::array_t>::value,
int> = 0>
static void construct(BasicJsonType& j, const CompatibleArrayType& arr)
{
@@ -743,7 +732,7 @@ struct external_constructor<value_t::array>
j.m_type = value_t::array;
j.m_value = value_t::array;
j.m_value.array->reserve(arr.size());
- for (bool x : arr)
+ for (const bool x : arr)
{
j.m_value.array->push_back(x);
}
@@ -782,8 +771,7 @@ struct external_constructor<value_t::object>
}
template<typename BasicJsonType, typename CompatibleObjectType,
- enable_if_t<not std::is_same<CompatibleObjectType,
- typename BasicJsonType::object_t>::value, int> = 0>
+ enable_if_t<not std::is_same<CompatibleObjectType, typename BasicJsonType::object_t>::value, int> = 0>
static void construct(BasicJsonType& j, const CompatibleObjectType& obj)
{
using std::begin;
@@ -896,7 +884,7 @@ struct is_compatible_integer_type
is_compatible_integer_type_impl <
std::is_integral<CompatibleNumberIntegerType>::value and
not std::is_same<bool, CompatibleNumberIntegerType>::value,
- RealIntegerType, CompatibleNumberIntegerType > ::value;
+ RealIntegerType, CompatibleNumberIntegerType >::value;
};
@@ -922,10 +910,8 @@ template<typename BasicJsonType, typename T>
struct has_non_default_from_json
{
private:
- template <
- typename U,
- typename = enable_if_t<std::is_same<
- T, decltype(uncvref_t<U>::from_json(std::declval<BasicJsonType>()))>::value >>
+ template<typename U, typename =
+ enable_if_t<std::is_same<T, decltype(uncvref_t<U>::from_json(std::declval<BasicJsonType>()))>::value>>
static int detect(U&&);
static void detect(...);
@@ -954,22 +940,21 @@ struct has_to_json
// to_json //
/////////////
-template<typename BasicJsonType, typename T, enable_if_t<
- std::is_same<T, typename BasicJsonType::boolean_t>::value, int> = 0>
+template<typename BasicJsonType, typename T,
+ enable_if_t<std::is_same<T, typename BasicJsonType::boolean_t>::value, int> = 0>
void to_json(BasicJsonType& j, T b) noexcept
{
external_constructor<value_t::boolean>::construct(j, b);
}
template<typename BasicJsonType, typename CompatibleString,
- enable_if_t<std::is_constructible<typename BasicJsonType::string_t,
- CompatibleString>::value, int> = 0>
+ enable_if_t<std::is_constructible<typename BasicJsonType::string_t, CompatibleString>::value, int> = 0>
void to_json(BasicJsonType& j, const CompatibleString& s)
{
external_constructor<value_t::string>::construct(j, s);
}
-template <typename BasicJsonType>
+template<typename BasicJsonType>
void to_json(BasicJsonType& j, typename BasicJsonType::string_t&& s)
{
external_constructor<value_t::string>::construct(j, std::move(s));
@@ -982,19 +967,15 @@ void to_json(BasicJsonType& j, FloatType val) noexcept
external_constructor<value_t::number_float>::construct(j, static_cast<typename BasicJsonType::number_float_t>(val));
}
-template <
- typename BasicJsonType, typename CompatibleNumberUnsignedType,
- enable_if_t<is_compatible_integer_type<typename BasicJsonType::number_unsigned_t,
- CompatibleNumberUnsignedType>::value, int> = 0 >
+template<typename BasicJsonType, typename CompatibleNumberUnsignedType,
+ enable_if_t<is_compatible_integer_type<typename BasicJsonType::number_unsigned_t, CompatibleNumberUnsignedType>::value, int> = 0>
void to_json(BasicJsonType& j, CompatibleNumberUnsignedType val) noexcept
{
external_constructor<value_t::number_unsigned>::construct(j, static_cast<typename BasicJsonType::number_unsigned_t>(val));
}
-template <
- typename BasicJsonType, typename CompatibleNumberIntegerType,
- enable_if_t<is_compatible_integer_type<typename BasicJsonType::number_integer_t,
- CompatibleNumberIntegerType>::value, int> = 0 >
+template<typename BasicJsonType, typename CompatibleNumberIntegerType,
+ enable_if_t<is_compatible_integer_type<typename BasicJsonType::number_integer_t, CompatibleNumberIntegerType>::value, int> = 0>
void to_json(BasicJsonType& j, CompatibleNumberIntegerType val) noexcept
{
external_constructor<value_t::number_integer>::construct(j, static_cast<typename BasicJsonType::number_integer_t>(val));
@@ -1014,49 +995,43 @@ void to_json(BasicJsonType& j, const std::vector<bool>& e)
external_constructor<value_t::array>::construct(j, e);
}
-template <
- typename BasicJsonType, typename CompatibleArrayType,
- enable_if_t <
- is_compatible_array_type<BasicJsonType, CompatibleArrayType>::value or
- std::is_same<typename BasicJsonType::array_t, CompatibleArrayType>::value,
- int > = 0 >
+template<typename BasicJsonType, typename CompatibleArrayType,
+ enable_if_t<is_compatible_array_type<BasicJsonType, CompatibleArrayType>::value or
+ std::is_same<typename BasicJsonType::array_t, CompatibleArrayType>::value,
+ int> = 0>
void to_json(BasicJsonType& j, const CompatibleArrayType& arr)
{
external_constructor<value_t::array>::construct(j, arr);
}
-template <typename BasicJsonType, typename T,
- enable_if_t<std::is_convertible<T, BasicJsonType>::value, int> = 0>
+template<typename BasicJsonType, typename T,
+ enable_if_t<std::is_convertible<T, BasicJsonType>::value, int> = 0>
void to_json(BasicJsonType& j, std::valarray<T> arr)
{
external_constructor<value_t::array>::construct(j, std::move(arr));
}
-template <typename BasicJsonType>
+template<typename BasicJsonType>
void to_json(BasicJsonType& j, typename BasicJsonType::array_t&& arr)
{
external_constructor<value_t::array>::construct(j, std::move(arr));
}
-template <
- typename BasicJsonType, typename CompatibleObjectType,
- enable_if_t<is_compatible_object_type<BasicJsonType, CompatibleObjectType>::value,
- int> = 0 >
+template<typename BasicJsonType, typename CompatibleObjectType,
+ enable_if_t<is_compatible_object_type<BasicJsonType, CompatibleObjectType>::value, int> = 0>
void to_json(BasicJsonType& j, const CompatibleObjectType& obj)
{
external_constructor<value_t::object>::construct(j, obj);
}
-template <typename BasicJsonType>
+template<typename BasicJsonType>
void to_json(BasicJsonType& j, typename BasicJsonType::object_t&& obj)
{
external_constructor<value_t::object>::construct(j, std::move(obj));
}
template<typename BasicJsonType, typename T, std::size_t N,
- enable_if_t<not std::is_constructible<
- typename BasicJsonType::string_t, T (&)[N]>::value,
- int> = 0>
+ enable_if_t<not std::is_constructible<typename BasicJsonType::string_t, T (&)[N]>::value, int> = 0>
void to_json(BasicJsonType& j, T (&arr)[N])
{
external_constructor<value_t::array>::construct(j, arr);
@@ -1087,8 +1062,7 @@ void to_json(BasicJsonType& j, const std::tuple<Args...>& t)
// overloads for basic_json template parameters
template<typename BasicJsonType, typename ArithmeticType,
enable_if_t<std::is_arithmetic<ArithmeticType>::value and
- not std::is_same<ArithmeticType,
- typename BasicJsonType::boolean_t>::value,
+ not std::is_same<ArithmeticType, typename BasicJsonType::boolean_t>::value,
int> = 0>
void get_arithmetic_value(const BasicJsonType& j, ArithmeticType& val)
{
@@ -1351,6 +1325,13 @@ struct to_json_fn
{
static_assert(sizeof(BasicJsonType) == 0,
"could not find to_json() method in T's namespace");
+
+#ifdef _MSC_VER
+ // MSVC does not show a stacktrace for the above assert
+ using decayed = uncvref_t<T>;
+ static_assert(sizeof(typename decayed::force_msvc_stacktrace) == 0,
+ "forcing MSVC stacktrace to show which T we're talking about.");
+#endif
}
public:
@@ -1378,6 +1359,12 @@ struct from_json_fn
{
static_assert(sizeof(BasicJsonType) == 0,
"could not find from_json() method in T's namespace");
+#ifdef _MSC_VER
+ // MSVC does not show a stacktrace for the above assert
+ using decayed = uncvref_t<T>;
+ static_assert(sizeof(typename decayed::force_msvc_stacktrace) == 0,
+ "forcing MSVC stacktrace to show which T we're talking about.");
+#endif
}
public:
@@ -1448,7 +1435,7 @@ class input_stream_adapter : public input_adapter_protocol
explicit input_stream_adapter(std::istream& i)
: is(i), sb(*i.rdbuf())
{
- // ignore Byte Order Mark at start of input
+ // skip byte order mark
std::char_traits<char>::int_type c;
if ((c = get_character()) == 0xEF)
{
@@ -1472,7 +1459,7 @@ class input_stream_adapter : public input_adapter_protocol
}
else if (c != std::char_traits<char>::eof())
{
- is.unget(); // Not BOM. Process as usual.
+ is.unget(); // no byte order mark; process as usual
}
}
@@ -1481,8 +1468,8 @@ class input_stream_adapter : public input_adapter_protocol
input_stream_adapter& operator=(input_stream_adapter&) = delete;
// std::istream/std::streambuf use std::char_traits<char>::to_int_type, to
- // ensure that std::char_traits<char>::eof() and the character 0xff do not
- // end up as the same value, eg. 0xffffffff.
+ // ensure that std::char_traits<char>::eof() and the character 0xFF do not
+ // end up as the same value, eg. 0xFFFFFFFF.
std::char_traits<char>::int_type get_character() override
{
return sb.sbumpc();
@@ -1561,8 +1548,7 @@ class input_adapter
template<typename CharT,
typename std::enable_if<
std::is_pointer<CharT>::value and
- std::is_integral<
- typename std::remove_pointer<CharT>::type>::value and
+ std::is_integral<typename std::remove_pointer<CharT>::type>::value and
sizeof(typename std::remove_pointer<CharT>::type) == 1,
int>::type = 0>
input_adapter(CharT b, std::size_t l)
@@ -1574,8 +1560,7 @@ class input_adapter
template<typename CharT,
typename std::enable_if<
std::is_pointer<CharT>::value and
- std::is_integral<
- typename std::remove_pointer<CharT>::type>::value and
+ std::is_integral<typename std::remove_pointer<CharT>::type>::value and
sizeof(typename std::remove_pointer<CharT>::type) == 1,
int>::type = 0>
input_adapter(CharT b)
@@ -1585,8 +1570,7 @@ class input_adapter
/// input adapter for iterator range with contiguous storage
template<class IteratorType,
typename std::enable_if<
- std::is_same<typename std::iterator_traits<IteratorType>::iterator_category,
- std::random_access_iterator_tag>::value,
+ std::is_same<typename std::iterator_traits<IteratorType>::iterator_category, std::random_access_iterator_tag>::value,
int>::type = 0>
input_adapter(IteratorType first, IteratorType last)
{
@@ -1624,13 +1608,10 @@ class input_adapter
: input_adapter(std::begin(array), std::end(array)) {}
/// input adapter for contiguous container
- template <
- class ContiguousContainer,
- typename std::enable_if <
- not std::is_pointer<ContiguousContainer>::value and
- std::is_base_of<std::random_access_iterator_tag,
- typename std::iterator_traits<decltype(std::begin(std::declval<ContiguousContainer const>()))>::iterator_category>::value,
- int >::type = 0 >
+ template<class ContiguousContainer, typename
+ std::enable_if<not std::is_pointer<ContiguousContainer>::value and
+ std::is_base_of<std::random_access_iterator_tag, typename std::iterator_traits<decltype(std::begin(std::declval<ContiguousContainer const>()))>::iterator_category>::value,
+ int>::type = 0>
input_adapter(const ContiguousContainer& c)
: input_adapter(std::begin(c), std::end(c)) {}
@@ -1804,6 +1785,12 @@ class lexer
checks if it is inside the range. If a violation was detected, set up an
error message and return false. Otherwise, return true.
+ @param[in] ranges list of integers; interpreted as list of pairs of
+ inclusive lower and upper bound, respectively
+
+ @pre The passed list @a ranges must have 2, 4, or 6 elements; that is,
+ 1, 2, or 3 pairs. This precondition is enforced by an assertion.
+
@return true if and only if no range violation was detected
*/
bool next_byte_in_range(std::initializer_list<int> ranges)
@@ -1970,19 +1957,19 @@ class lexer
// result of the above calculation yields a proper codepoint
assert(0x00 <= codepoint and codepoint <= 0x10FFFF);
- // translate code point to bytes
+ // translate codepoint into bytes
if (codepoint < 0x80)
{
// 1-byte characters: 0xxxxxxx (ASCII)
add(codepoint);
}
- else if (codepoint <= 0x7ff)
+ else if (codepoint <= 0x7FF)
{
// 2-byte characters: 110xxxxx 10xxxxxx
add(0xC0 | (codepoint >> 6));
add(0x80 | (codepoint & 0x3F));
}
- else if (codepoint <= 0xffff)
+ else if (codepoint <= 0xFFFF)
{
// 3-byte characters: 1110xxxx 10xxxxxx 10xxxxxx
add(0xE0 | (codepoint >> 12));
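
Worked example (illustrative, not part of the patch): with the byte layout shown above, the code point U+20AC (the euro sign) encodes to the three bytes 0xE2 0x82 0xAC:

    #include <cassert>
    #include <cstdint>
    #include <vector>

    int main()
    {
        const uint32_t codepoint = 0x20AC;
        std::vector<uint8_t> out;
        out.push_back(0xE0 | (codepoint >> 12));          // 1110xxxx -> 0xE2
        out.push_back(0x80 | ((codepoint >> 6) & 0x3F));  // 10xxxxxx -> 0x82
        out.push_back(0x80 | (codepoint & 0x3F));         // 10xxxxxx -> 0xAC
        assert((out == std::vector<uint8_t>{0xE2, 0x82, 0xAC}));
        return 0;
    }
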
@@ -2021,12 +2008,12 @@ class lexer
case 0x07:
case 0x08:
case 0x09:
- case 0x0a:
- case 0x0b:
- case 0x0c:
- case 0x0d:
- case 0x0e:
- case 0x0f:
+ case 0x0A:
+ case 0x0B:
+ case 0x0C:
+ case 0x0D:
+ case 0x0E:
+ case 0x0F:
case 0x10:
case 0x11:
case 0x12:
@@ -2037,12 +2024,12 @@ class lexer
case 0x17:
case 0x18:
case 0x19:
- case 0x1a:
- case 0x1b:
- case 0x1c:
- case 0x1d:
- case 0x1e:
- case 0x1f:
+ case 0x1A:
+ case 0x1B:
+ case 0x1C:
+ case 0x1D:
+ case 0x1E:
+ case 0x1F:
{
error_message = "invalid string: control character must be escaped";
return token_type::parse_error;
@@ -2058,12 +2045,12 @@ class lexer
case 0x27:
case 0x28:
case 0x29:
- case 0x2a:
- case 0x2b:
- case 0x2c:
- case 0x2d:
- case 0x2e:
- case 0x2f:
+ case 0x2A:
+ case 0x2B:
+ case 0x2C:
+ case 0x2D:
+ case 0x2E:
+ case 0x2F:
case 0x30:
case 0x31:
case 0x32:
@@ -2074,12 +2061,12 @@ class lexer
case 0x37:
case 0x38:
case 0x39:
- case 0x3a:
- case 0x3b:
- case 0x3c:
- case 0x3d:
- case 0x3e:
- case 0x3f:
+ case 0x3A:
+ case 0x3B:
+ case 0x3C:
+ case 0x3D:
+ case 0x3E:
+ case 0x3F:
case 0x40:
case 0x41:
case 0x42:
@@ -2090,12 +2077,12 @@ class lexer
case 0x47:
case 0x48:
case 0x49:
- case 0x4a:
- case 0x4b:
- case 0x4c:
- case 0x4d:
- case 0x4e:
- case 0x4f:
+ case 0x4A:
+ case 0x4B:
+ case 0x4C:
+ case 0x4D:
+ case 0x4E:
+ case 0x4F:
case 0x50:
case 0x51:
case 0x52:
@@ -2106,11 +2093,11 @@ class lexer
case 0x57:
case 0x58:
case 0x59:
- case 0x5a:
- case 0x5b:
- case 0x5d:
- case 0x5e:
- case 0x5f:
+ case 0x5A:
+ case 0x5B:
+ case 0x5D:
+ case 0x5E:
+ case 0x5F:
case 0x60:
case 0x61:
case 0x62:
@@ -2121,12 +2108,12 @@ class lexer
case 0x67:
case 0x68:
case 0x69:
- case 0x6a:
- case 0x6b:
- case 0x6c:
- case 0x6d:
- case 0x6e:
- case 0x6f:
+ case 0x6A:
+ case 0x6B:
+ case 0x6C:
+ case 0x6D:
+ case 0x6E:
+ case 0x6F:
case 0x70:
case 0x71:
case 0x72:
@@ -2137,48 +2124,48 @@ class lexer
case 0x77:
case 0x78:
case 0x79:
- case 0x7a:
- case 0x7b:
- case 0x7c:
- case 0x7d:
- case 0x7e:
- case 0x7f:
+ case 0x7A:
+ case 0x7B:
+ case 0x7C:
+ case 0x7D:
+ case 0x7E:
+ case 0x7F:
{
add(current);
break;
}
// U+0080..U+07FF: bytes C2..DF 80..BF
- case 0xc2:
- case 0xc3:
- case 0xc4:
- case 0xc5:
- case 0xc6:
- case 0xc7:
- case 0xc8:
- case 0xc9:
- case 0xca:
- case 0xcb:
- case 0xcc:
- case 0xcd:
- case 0xce:
- case 0xcf:
- case 0xd0:
- case 0xd1:
- case 0xd2:
- case 0xd3:
- case 0xd4:
- case 0xd5:
- case 0xd6:
- case 0xd7:
- case 0xd8:
- case 0xd9:
- case 0xda:
- case 0xdb:
- case 0xdc:
- case 0xdd:
- case 0xde:
- case 0xdf:
+ case 0xC2:
+ case 0xC3:
+ case 0xC4:
+ case 0xC5:
+ case 0xC6:
+ case 0xC7:
+ case 0xC8:
+ case 0xC9:
+ case 0xCA:
+ case 0xCB:
+ case 0xCC:
+ case 0xCD:
+ case 0xCE:
+ case 0xCF:
+ case 0xD0:
+ case 0xD1:
+ case 0xD2:
+ case 0xD3:
+ case 0xD4:
+ case 0xD5:
+ case 0xD6:
+ case 0xD7:
+ case 0xD8:
+ case 0xD9:
+ case 0xDA:
+ case 0xDB:
+ case 0xDC:
+ case 0xDD:
+ case 0xDE:
+ case 0xDF:
{
if (JSON_UNLIKELY(not next_byte_in_range({0x80, 0xBF})))
{
@@ -2188,7 +2175,7 @@ class lexer
}
// U+0800..U+0FFF: bytes E0 A0..BF 80..BF
- case 0xe0:
+ case 0xE0:
{
if (JSON_UNLIKELY(not (next_byte_in_range({0xA0, 0xBF, 0x80, 0xBF}))))
{
@@ -2199,20 +2186,20 @@ class lexer
// U+1000..U+CFFF: bytes E1..EC 80..BF 80..BF
// U+E000..U+FFFF: bytes EE..EF 80..BF 80..BF
- case 0xe1:
- case 0xe2:
- case 0xe3:
- case 0xe4:
- case 0xe5:
- case 0xe6:
- case 0xe7:
- case 0xe8:
- case 0xe9:
- case 0xea:
- case 0xeb:
- case 0xec:
- case 0xee:
- case 0xef:
+ case 0xE1:
+ case 0xE2:
+ case 0xE3:
+ case 0xE4:
+ case 0xE5:
+ case 0xE6:
+ case 0xE7:
+ case 0xE8:
+ case 0xE9:
+ case 0xEA:
+ case 0xEB:
+ case 0xEC:
+ case 0xEE:
+ case 0xEF:
{
if (JSON_UNLIKELY(not (next_byte_in_range({0x80, 0xBF, 0x80, 0xBF}))))
{
@@ -2222,7 +2209,7 @@ class lexer
}
// U+D000..U+D7FF: bytes ED 80..9F 80..BF
- case 0xed:
+ case 0xED:
{
if (JSON_UNLIKELY(not (next_byte_in_range({0x80, 0x9F, 0x80, 0xBF}))))
{
@@ -2232,7 +2219,7 @@ class lexer
}
// U+10000..U+3FFFF F0 90..BF 80..BF 80..BF
- case 0xf0:
+ case 0xF0:
{
if (JSON_UNLIKELY(not (next_byte_in_range({0x90, 0xBF, 0x80, 0xBF, 0x80, 0xBF}))))
{
@@ -2242,9 +2229,9 @@ class lexer
}
// U+40000..U+FFFFF F1..F3 80..BF 80..BF 80..BF
- case 0xf1:
- case 0xf2:
- case 0xf3:
+ case 0xF1:
+ case 0xF2:
+ case 0xF3:
{
if (JSON_UNLIKELY(not (next_byte_in_range({0x80, 0xBF, 0x80, 0xBF, 0x80, 0xBF}))))
{
@@ -2254,7 +2241,7 @@ class lexer
}
// U+100000..U+10FFFF F4 80..8F 80..BF 80..BF
- case 0xf4:
+ case 0xF4:
{
if (JSON_UNLIKELY(not (next_byte_in_range({0x80, 0x8F, 0x80, 0xBF, 0x80, 0xBF}))))
{
@@ -2772,9 +2759,9 @@ scan_number_done:
{
// escape control characters
std::string result;
- for (auto c : token_string)
+ for (const auto c : token_string)
{
- if ('\x00' <= c and c <= '\x1f')
+ if ('\x00' <= c and c <= '\x1F')
{
// escape control characters
std::stringstream ss;
@@ -2877,10 +2864,10 @@ scan_number_done:
std::size_t chars_read = 0;
/// raw input token string (for error messages)
- std::vector<char> token_string { };
+ std::vector<char> token_string {};
/// buffer for variable-length tokens (numbers, strings)
- std::string yytext { };
+ std::string yytext {};
/// a description of occurred lexer errors
const char* error_message = "";
@@ -3281,7 +3268,7 @@ class parser
}
/*!
- @brief the acutal acceptor
+ @brief the actual acceptor
@invariant 1. The last token is not yet processed. Therefore, the caller
of this function must make sure a token has been read.
@@ -3539,7 +3526,7 @@ class primitive_iterator_t
return *this;
}
- primitive_iterator_t operator++(int)
+ primitive_iterator_t const operator++(int)
{
auto result = *this;
m_it++;
@@ -3552,7 +3539,7 @@ class primitive_iterator_t
return *this;
}
- primitive_iterator_t operator--(int)
+ primitive_iterator_t const operator--(int)
{
auto result = *this;
m_it--;
@@ -3618,7 +3605,7 @@ This class implements a both iterators (iterator and const_iterator) for the
iterators in version 3.0.0 (see https://github.com/nlohmann/json/issues/593)
*/
template<typename BasicJsonType>
-class iter_impl : public std::iterator<std::bidirectional_iterator_tag, BasicJsonType>
+class iter_impl
{
/// allow basic_json to access private members
friend iter_impl<typename std::conditional<std::is_const<BasicJsonType>::value, typename std::remove_const<BasicJsonType>::type, const BasicJsonType>::type>;
@@ -3632,6 +3619,14 @@ class iter_impl : public std::iterator<std::bidirectional_iterator_tag, BasicJso
"iter_impl only accepts (const) basic_json");
public:
+
+ /// The std::iterator class template (used as a base class to provide typedefs) is deprecated in C++17.
+ /// The C++ Standard has never required user-defined iterators to derive from std::iterator.
+ /// A user-defined iterator should provide publicly accessible typedefs named
+ /// iterator_category, value_type, difference_type, pointer, and reference.
+ /// Note that value_type is required to be non-const, even for constant iterators.
+ using iterator_category = std::bidirectional_iterator_tag;
+
/// the type of the values when the iterator is dereferenced
using value_type = typename BasicJsonType::value_type;
/// a type to represent differences between iterators
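
Aside (not from the patch): the member typedefs mentioned in the new comment above are what C++17 expects a user-defined iterator to expose directly instead of deriving from std::iterator. A minimal, hypothetical sketch of such an iterator:

    #include <cstddef>
    #include <iterator>

    struct int_ptr_iterator
    {
        // the five typedefs that replace inheriting from std::iterator
        using iterator_category = std::bidirectional_iterator_tag;
        using value_type        = int;              // non-const even for constant iterators
        using difference_type   = std::ptrdiff_t;
        using pointer           = int*;
        using reference         = int&;

        int* p = nullptr;
        reference operator*() const { return *p; }
        int_ptr_iterator& operator++() { ++p; return *this; }
        int_ptr_iterator& operator--() { --p; return *this; }
        bool operator==(const int_ptr_iterator& o) const { return p == o.p; }
        bool operator!=(const int_ptr_iterator& o) const { return p != o.p; }
    };
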
@@ -3855,7 +3850,7 @@ class iter_impl : public std::iterator<std::bidirectional_iterator_tag, BasicJso
@brief post-increment (it++)
@pre The iterator is initialized; i.e. `m_object != nullptr`.
*/
- iter_impl operator++(int)
+ iter_impl const operator++(int)
{
auto result = *this;
++(*this);
@@ -3898,7 +3893,7 @@ class iter_impl : public std::iterator<std::bidirectional_iterator_tag, BasicJso
@brief post-decrement (it--)
@pre The iterator is initialized; i.e. `m_object != nullptr`.
*/
- iter_impl operator--(int)
+ iter_impl const operator--(int)
{
auto result = *this;
--(*this);
@@ -4291,7 +4286,7 @@ class json_reverse_iterator : public std::reverse_iterator<Base>
{
public:
using difference_type = std::ptrdiff_t;
- /// shortcut to the reverse iterator adaptor
+ /// shortcut to the reverse iterator adapter
using base_iterator = std::reverse_iterator<Base>;
/// the reference type for the pointed-to element
using reference = typename Base::reference;
@@ -4304,7 +4299,7 @@ class json_reverse_iterator : public std::reverse_iterator<Base>
json_reverse_iterator(const base_iterator& it) noexcept : base_iterator(it) {}
/// post-increment (it++)
- json_reverse_iterator operator++(int)
+ json_reverse_iterator const operator++(int)
{
return static_cast<json_reverse_iterator>(base_iterator::operator++(1));
}
@@ -4316,7 +4311,7 @@ class json_reverse_iterator : public std::reverse_iterator<Base>
}
/// post-decrement (it--)
- json_reverse_iterator operator--(int)
+ json_reverse_iterator const operator--(int)
{
return static_cast<json_reverse_iterator>(base_iterator::operator--(1));
}
@@ -4576,12 +4571,12 @@ class binary_reader
case 0x07:
case 0x08:
case 0x09:
- case 0x0a:
- case 0x0b:
- case 0x0c:
- case 0x0d:
- case 0x0e:
- case 0x0f:
+ case 0x0A:
+ case 0x0B:
+ case 0x0C:
+ case 0x0D:
+ case 0x0E:
+ case 0x0F:
case 0x10:
case 0x11:
case 0x12:
@@ -4598,10 +4593,10 @@ class binary_reader
case 0x19: // Unsigned integer (two-byte uint16_t follows)
return get_number<uint16_t>();
- case 0x1a: // Unsigned integer (four-byte uint32_t follows)
+ case 0x1A: // Unsigned integer (four-byte uint32_t follows)
return get_number<uint32_t>();
- case 0x1b: // Unsigned integer (eight-byte uint64_t follows)
+ case 0x1B: // Unsigned integer (eight-byte uint64_t follows)
return get_number<uint64_t>();
// Negative integer -1-0x00..-1-0x17 (-1..-24)
@@ -4615,12 +4610,12 @@ class binary_reader
case 0x27:
case 0x28:
case 0x29:
- case 0x2a:
- case 0x2b:
- case 0x2c:
- case 0x2d:
- case 0x2e:
- case 0x2f:
+ case 0x2A:
+ case 0x2B:
+ case 0x2C:
+ case 0x2D:
+ case 0x2E:
+ case 0x2F:
case 0x30:
case 0x31:
case 0x32:
@@ -4642,12 +4637,12 @@ class binary_reader
return static_cast<number_integer_t>(-1) - get_number<uint16_t>();
}
- case 0x3a: // Negative integer -1-n (four-byte uint32_t follows)
+ case 0x3A: // Negative integer -1-n (four-byte uint32_t follows)
{
return static_cast<number_integer_t>(-1) - get_number<uint32_t>();
}
- case 0x3b: // Negative integer -1-n (eight-byte uint64_t follows)
+ case 0x3B: // Negative integer -1-n (eight-byte uint64_t follows)
{
return static_cast<number_integer_t>(-1) -
static_cast<number_integer_t>(get_number<uint64_t>());
@@ -4664,12 +4659,12 @@ class binary_reader
case 0x67:
case 0x68:
case 0x69:
- case 0x6a:
- case 0x6b:
- case 0x6c:
- case 0x6d:
- case 0x6e:
- case 0x6f:
+ case 0x6A:
+ case 0x6B:
+ case 0x6C:
+ case 0x6D:
+ case 0x6E:
+ case 0x6F:
case 0x70:
case 0x71:
case 0x72:
@@ -4680,9 +4675,9 @@ class binary_reader
case 0x77:
case 0x78: // UTF-8 string (one-byte uint8_t for n follows)
case 0x79: // UTF-8 string (two-byte uint16_t for n follow)
- case 0x7a: // UTF-8 string (four-byte uint32_t for n follow)
- case 0x7b: // UTF-8 string (eight-byte uint64_t for n follow)
- case 0x7f: // UTF-8 string (indefinite length)
+ case 0x7A: // UTF-8 string (four-byte uint32_t for n follow)
+ case 0x7B: // UTF-8 string (eight-byte uint64_t for n follow)
+ case 0x7F: // UTF-8 string (indefinite length)
{
return get_cbor_string();
}
@@ -4698,12 +4693,12 @@ class binary_reader
case 0x87:
case 0x88:
case 0x89:
- case 0x8a:
- case 0x8b:
- case 0x8c:
- case 0x8d:
- case 0x8e:
- case 0x8f:
+ case 0x8A:
+ case 0x8B:
+ case 0x8C:
+ case 0x8D:
+ case 0x8E:
+ case 0x8F:
case 0x90:
case 0x91:
case 0x92:
@@ -4713,7 +4708,7 @@ class binary_reader
case 0x96:
case 0x97:
{
- return get_cbor_array(current & 0x1f);
+ return get_cbor_array(current & 0x1F);
}
case 0x98: // array (one-byte uint8_t for n follows)
@@ -4726,20 +4721,20 @@ class binary_reader
return get_cbor_array(get_number<uint16_t>());
}
- case 0x9a: // array (four-byte uint32_t for n follow)
+ case 0x9A: // array (four-byte uint32_t for n follow)
{
return get_cbor_array(get_number<uint32_t>());
}
- case 0x9b: // array (eight-byte uint64_t for n follow)
+ case 0x9B: // array (eight-byte uint64_t for n follow)
{
return get_cbor_array(get_number<uint64_t>());
}
- case 0x9f: // array (indefinite length)
+ case 0x9F: // array (indefinite length)
{
BasicJsonType result = value_t::array;
- while (get() != 0xff)
+ while (get() != 0xFF)
{
result.push_back(parse_cbor_internal(false));
}
@@ -4747,58 +4742,58 @@ class binary_reader
}
// map (0x00..0x17 pairs of data items follow)
- case 0xa0:
- case 0xa1:
- case 0xa2:
- case 0xa3:
- case 0xa4:
- case 0xa5:
- case 0xa6:
- case 0xa7:
- case 0xa8:
- case 0xa9:
- case 0xaa:
- case 0xab:
- case 0xac:
- case 0xad:
- case 0xae:
- case 0xaf:
- case 0xb0:
- case 0xb1:
- case 0xb2:
- case 0xb3:
- case 0xb4:
- case 0xb5:
- case 0xb6:
- case 0xb7:
- {
- return get_cbor_object(current & 0x1f);
- }
-
- case 0xb8: // map (one-byte uint8_t for n follows)
+ case 0xA0:
+ case 0xA1:
+ case 0xA2:
+ case 0xA3:
+ case 0xA4:
+ case 0xA5:
+ case 0xA6:
+ case 0xA7:
+ case 0xA8:
+ case 0xA9:
+ case 0xAA:
+ case 0xAB:
+ case 0xAC:
+ case 0xAD:
+ case 0xAE:
+ case 0xAF:
+ case 0xB0:
+ case 0xB1:
+ case 0xB2:
+ case 0xB3:
+ case 0xB4:
+ case 0xB5:
+ case 0xB6:
+ case 0xB7:
+ {
+ return get_cbor_object(current & 0x1F);
+ }
+
+ case 0xB8: // map (one-byte uint8_t for n follows)
{
return get_cbor_object(get_number<uint8_t>());
}
- case 0xb9: // map (two-byte uint16_t for n follow)
+ case 0xB9: // map (two-byte uint16_t for n follow)
{
return get_cbor_object(get_number<uint16_t>());
}
- case 0xba: // map (four-byte uint32_t for n follow)
+ case 0xBA: // map (four-byte uint32_t for n follow)
{
return get_cbor_object(get_number<uint32_t>());
}
- case 0xbb: // map (eight-byte uint64_t for n follow)
+ case 0xBB: // map (eight-byte uint64_t for n follow)
{
return get_cbor_object(get_number<uint64_t>());
}
- case 0xbf: // map (indefinite length)
+ case 0xBF: // map (indefinite length)
{
BasicJsonType result = value_t::object;
- while (get() != 0xff)
+ while (get() != 0xFF)
{
auto key = get_cbor_string();
result[key] = parse_cbor_internal();
@@ -4806,22 +4801,22 @@ class binary_reader
return result;
}
- case 0xf4: // false
+ case 0xF4: // false
{
return false;
}
- case 0xf5: // true
+ case 0xF5: // true
{
return true;
}
- case 0xf6: // null
+ case 0xF6: // null
{
return value_t::null;
}
- case 0xf9: // Half-Precision Float (two-byte IEEE 754)
+ case 0xF9: // Half-Precision Float (two-byte IEEE 754)
{
const int byte1 = get();
check_eof();
@@ -4837,8 +4832,8 @@ class binary_reader
// half-precision floating-point numbers in the C language
// is shown in Fig. 3.
const int half = (byte1 << 8) + byte2;
- const int exp = (half >> 10) & 0x1f;
- const int mant = half & 0x3ff;
+ const int exp = (half >> 10) & 0x1F;
+ const int mant = half & 0x3FF;
double val;
if (exp == 0)
{
@@ -4856,12 +4851,12 @@ class binary_reader
return (half & 0x8000) != 0 ? -val : val;
}
- case 0xfa: // Single-Precision Float (four-byte IEEE 754)
+ case 0xFA: // Single-Precision Float (four-byte IEEE 754)
{
return get_number<float>();
}
- case 0xfb: // Double-Precision Float (eight-byte IEEE 754)
+ case 0xFB: // Double-Precision Float (eight-byte IEEE 754)
{
return get_number<double>();
}
@@ -4869,7 +4864,7 @@ class binary_reader
default: // anything else (0xFF is handled inside the other types)
{
std::stringstream ss;
- ss << std::setw(2) << std::setfill('0') << std::hex << current;
+ ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << current;
JSON_THROW(parse_error::create(112, chars_read, "error reading CBOR; last byte: 0x" + ss.str()));
}
}
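
Illustrative aside (not part of the diff): the binary_reader switch above is what backs the library's public from_cbor()/to_cbor() interface. A small round-trip sketch, assuming the bundled header is reachable as <nlohmann/json.hpp>:

    #include <nlohmann/json.hpp>
    #include <cassert>
    #include <cstdint>
    #include <vector>

    int main()
    {
        nlohmann::json j = {{"pi", 3.141}, {"happy", true}};
        std::vector<uint8_t> cbor = nlohmann::json::to_cbor(j);   // serialize to CBOR bytes
        nlohmann::json back = nlohmann::json::from_cbor(cbor);    // parsed via binary_reader
        assert(back == j);
        return 0;
    }
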
@@ -4894,12 +4889,12 @@ class binary_reader
case 0x07:
case 0x08:
case 0x09:
- case 0x0a:
- case 0x0b:
- case 0x0c:
- case 0x0d:
- case 0x0e:
- case 0x0f:
+ case 0x0A:
+ case 0x0B:
+ case 0x0C:
+ case 0x0D:
+ case 0x0E:
+ case 0x0F:
case 0x10:
case 0x11:
case 0x12:
@@ -4910,12 +4905,12 @@ class binary_reader
case 0x17:
case 0x18:
case 0x19:
- case 0x1a:
- case 0x1b:
- case 0x1c:
- case 0x1d:
- case 0x1e:
- case 0x1f:
+ case 0x1A:
+ case 0x1B:
+ case 0x1C:
+ case 0x1D:
+ case 0x1E:
+ case 0x1F:
case 0x20:
case 0x21:
case 0x22:
@@ -4926,12 +4921,12 @@ class binary_reader
case 0x27:
case 0x28:
case 0x29:
- case 0x2a:
- case 0x2b:
- case 0x2c:
- case 0x2d:
- case 0x2e:
- case 0x2f:
+ case 0x2A:
+ case 0x2B:
+ case 0x2C:
+ case 0x2D:
+ case 0x2E:
+ case 0x2F:
case 0x30:
case 0x31:
case 0x32:
@@ -4942,12 +4937,12 @@ class binary_reader
case 0x37:
case 0x38:
case 0x39:
- case 0x3a:
- case 0x3b:
- case 0x3c:
- case 0x3d:
- case 0x3e:
- case 0x3f:
+ case 0x3A:
+ case 0x3B:
+ case 0x3C:
+ case 0x3D:
+ case 0x3E:
+ case 0x3F:
case 0x40:
case 0x41:
case 0x42:
@@ -4958,12 +4953,12 @@ class binary_reader
case 0x47:
case 0x48:
case 0x49:
- case 0x4a:
- case 0x4b:
- case 0x4c:
- case 0x4d:
- case 0x4e:
- case 0x4f:
+ case 0x4A:
+ case 0x4B:
+ case 0x4C:
+ case 0x4D:
+ case 0x4E:
+ case 0x4F:
case 0x50:
case 0x51:
case 0x52:
@@ -4974,12 +4969,12 @@ class binary_reader
case 0x57:
case 0x58:
case 0x59:
- case 0x5a:
- case 0x5b:
- case 0x5c:
- case 0x5d:
- case 0x5e:
- case 0x5f:
+ case 0x5A:
+ case 0x5B:
+ case 0x5C:
+ case 0x5D:
+ case 0x5E:
+ case 0x5F:
case 0x60:
case 0x61:
case 0x62:
@@ -4990,12 +4985,12 @@ class binary_reader
case 0x67:
case 0x68:
case 0x69:
- case 0x6a:
- case 0x6b:
- case 0x6c:
- case 0x6d:
- case 0x6e:
- case 0x6f:
+ case 0x6A:
+ case 0x6B:
+ case 0x6C:
+ case 0x6D:
+ case 0x6E:
+ case 0x6F:
case 0x70:
case 0x71:
case 0x72:
@@ -5006,12 +5001,12 @@ class binary_reader
case 0x77:
case 0x78:
case 0x79:
- case 0x7a:
- case 0x7b:
- case 0x7c:
- case 0x7d:
- case 0x7e:
- case 0x7f:
+ case 0x7A:
+ case 0x7B:
+ case 0x7C:
+ case 0x7D:
+ case 0x7E:
+ case 0x7F:
return static_cast<number_unsigned_t>(current);
// fixmap
@@ -5025,14 +5020,14 @@ class binary_reader
case 0x87:
case 0x88:
case 0x89:
- case 0x8a:
- case 0x8b:
- case 0x8c:
- case 0x8d:
- case 0x8e:
- case 0x8f:
+ case 0x8A:
+ case 0x8B:
+ case 0x8C:
+ case 0x8D:
+ case 0x8E:
+ case 0x8F:
{
- return get_msgpack_object(current & 0x0f);
+ return get_msgpack_object(current & 0x0F);
}
// fixarray
@@ -5046,154 +5041,154 @@ class binary_reader
case 0x97:
case 0x98:
case 0x99:
- case 0x9a:
- case 0x9b:
- case 0x9c:
- case 0x9d:
- case 0x9e:
- case 0x9f:
+ case 0x9A:
+ case 0x9B:
+ case 0x9C:
+ case 0x9D:
+ case 0x9E:
+ case 0x9F:
{
- return get_msgpack_array(current & 0x0f);
+ return get_msgpack_array(current & 0x0F);
}
// fixstr
- case 0xa0:
- case 0xa1:
- case 0xa2:
- case 0xa3:
- case 0xa4:
- case 0xa5:
- case 0xa6:
- case 0xa7:
- case 0xa8:
- case 0xa9:
- case 0xaa:
- case 0xab:
- case 0xac:
- case 0xad:
- case 0xae:
- case 0xaf:
- case 0xb0:
- case 0xb1:
- case 0xb2:
- case 0xb3:
- case 0xb4:
- case 0xb5:
- case 0xb6:
- case 0xb7:
- case 0xb8:
- case 0xb9:
- case 0xba:
- case 0xbb:
- case 0xbc:
- case 0xbd:
- case 0xbe:
- case 0xbf:
+ case 0xA0:
+ case 0xA1:
+ case 0xA2:
+ case 0xA3:
+ case 0xA4:
+ case 0xA5:
+ case 0xA6:
+ case 0xA7:
+ case 0xA8:
+ case 0xA9:
+ case 0xAA:
+ case 0xAB:
+ case 0xAC:
+ case 0xAD:
+ case 0xAE:
+ case 0xAF:
+ case 0xB0:
+ case 0xB1:
+ case 0xB2:
+ case 0xB3:
+ case 0xB4:
+ case 0xB5:
+ case 0xB6:
+ case 0xB7:
+ case 0xB8:
+ case 0xB9:
+ case 0xBA:
+ case 0xBB:
+ case 0xBC:
+ case 0xBD:
+ case 0xBE:
+ case 0xBF:
return get_msgpack_string();
- case 0xc0: // nil
+ case 0xC0: // nil
return value_t::null;
- case 0xc2: // false
+ case 0xC2: // false
return false;
- case 0xc3: // true
+ case 0xC3: // true
return true;
- case 0xca: // float 32
+ case 0xCA: // float 32
return get_number<float>();
- case 0xcb: // float 64
+ case 0xCB: // float 64
return get_number<double>();
- case 0xcc: // uint 8
+ case 0xCC: // uint 8
return get_number<uint8_t>();
- case 0xcd: // uint 16
+ case 0xCD: // uint 16
return get_number<uint16_t>();
- case 0xce: // uint 32
+ case 0xCE: // uint 32
return get_number<uint32_t>();
- case 0xcf: // uint 64
+ case 0xCF: // uint 64
return get_number<uint64_t>();
- case 0xd0: // int 8
+ case 0xD0: // int 8
return get_number<int8_t>();
- case 0xd1: // int 16
+ case 0xD1: // int 16
return get_number<int16_t>();
- case 0xd2: // int 32
+ case 0xD2: // int 32
return get_number<int32_t>();
- case 0xd3: // int 64
+ case 0xD3: // int 64
return get_number<int64_t>();
- case 0xd9: // str 8
- case 0xda: // str 16
- case 0xdb: // str 32
+ case 0xD9: // str 8
+ case 0xDA: // str 16
+ case 0xDB: // str 32
return get_msgpack_string();
- case 0xdc: // array 16
+ case 0xDC: // array 16
{
return get_msgpack_array(get_number<uint16_t>());
}
- case 0xdd: // array 32
+ case 0xDD: // array 32
{
return get_msgpack_array(get_number<uint32_t>());
}
- case 0xde: // map 16
+ case 0xDE: // map 16
{
return get_msgpack_object(get_number<uint16_t>());
}
- case 0xdf: // map 32
+ case 0xDF: // map 32
{
return get_msgpack_object(get_number<uint32_t>());
}
// positive fixint
- case 0xe0:
- case 0xe1:
- case 0xe2:
- case 0xe3:
- case 0xe4:
- case 0xe5:
- case 0xe6:
- case 0xe7:
- case 0xe8:
- case 0xe9:
- case 0xea:
- case 0xeb:
- case 0xec:
- case 0xed:
- case 0xee:
- case 0xef:
- case 0xf0:
- case 0xf1:
- case 0xf2:
- case 0xf3:
- case 0xf4:
- case 0xf5:
- case 0xf6:
- case 0xf7:
- case 0xf8:
- case 0xf9:
- case 0xfa:
- case 0xfb:
- case 0xfc:
- case 0xfd:
- case 0xfe:
- case 0xff:
+ case 0xE0:
+ case 0xE1:
+ case 0xE2:
+ case 0xE3:
+ case 0xE4:
+ case 0xE5:
+ case 0xE6:
+ case 0xE7:
+ case 0xE8:
+ case 0xE9:
+ case 0xEA:
+ case 0xEB:
+ case 0xEC:
+ case 0xED:
+ case 0xEE:
+ case 0xEF:
+ case 0xF0:
+ case 0xF1:
+ case 0xF2:
+ case 0xF3:
+ case 0xF4:
+ case 0xF5:
+ case 0xF6:
+ case 0xF7:
+ case 0xF8:
+ case 0xF9:
+ case 0xFA:
+ case 0xFB:
+ case 0xFC:
+ case 0xFD:
+ case 0xFE:
+ case 0xFF:
return static_cast<int8_t>(current);
default: // anything else
{
std::stringstream ss;
- ss << std::setw(2) << std::setfill('0') << std::hex << current;
+ ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << current;
JSON_THROW(parse_error::create(112, chars_read,
"error reading MessagePack; last byte: 0x" + ss.str()));
}
@@ -5309,12 +5304,12 @@ class binary_reader
case 0x67:
case 0x68:
case 0x69:
- case 0x6a:
- case 0x6b:
- case 0x6c:
- case 0x6d:
- case 0x6e:
- case 0x6f:
+ case 0x6A:
+ case 0x6B:
+ case 0x6C:
+ case 0x6D:
+ case 0x6E:
+ case 0x6F:
case 0x70:
case 0x71:
case 0x72:
@@ -5324,7 +5319,7 @@ class binary_reader
case 0x76:
case 0x77:
{
- return get_string(current & 0x1f);
+ return get_string(current & 0x1F);
}
case 0x78: // UTF-8 string (one-byte uint8_t for n follows)
@@ -5337,20 +5332,20 @@ class binary_reader
return get_string(get_number<uint16_t>());
}
- case 0x7a: // UTF-8 string (four-byte uint32_t for n follow)
+ case 0x7A: // UTF-8 string (four-byte uint32_t for n follow)
{
return get_string(get_number<uint32_t>());
}
- case 0x7b: // UTF-8 string (eight-byte uint64_t for n follow)
+ case 0x7B: // UTF-8 string (eight-byte uint64_t for n follow)
{
return get_string(get_number<uint64_t>());
}
- case 0x7f: // UTF-8 string (indefinite length)
+ case 0x7F: // UTF-8 string (indefinite length)
{
std::string result;
- while (get() != 0xff)
+ while (get() != 0xFF)
{
check_eof();
result.push_back(static_cast<char>(current));
@@ -5361,7 +5356,7 @@ class binary_reader
default:
{
std::stringstream ss;
- ss << std::setw(2) << std::setfill('0') << std::hex << current;
+ ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << current;
JSON_THROW(parse_error::create(113, chars_read, "expected a CBOR string; last byte: 0x" + ss.str()));
}
}
@@ -5412,53 +5407,53 @@ class binary_reader
switch (current)
{
// fixstr
- case 0xa0:
- case 0xa1:
- case 0xa2:
- case 0xa3:
- case 0xa4:
- case 0xa5:
- case 0xa6:
- case 0xa7:
- case 0xa8:
- case 0xa9:
- case 0xaa:
- case 0xab:
- case 0xac:
- case 0xad:
- case 0xae:
- case 0xaf:
- case 0xb0:
- case 0xb1:
- case 0xb2:
- case 0xb3:
- case 0xb4:
- case 0xb5:
- case 0xb6:
- case 0xb7:
- case 0xb8:
- case 0xb9:
- case 0xba:
- case 0xbb:
- case 0xbc:
- case 0xbd:
- case 0xbe:
- case 0xbf:
- {
- return get_string(current & 0x1f);
- }
-
- case 0xd9: // str 8
+ case 0xA0:
+ case 0xA1:
+ case 0xA2:
+ case 0xA3:
+ case 0xA4:
+ case 0xA5:
+ case 0xA6:
+ case 0xA7:
+ case 0xA8:
+ case 0xA9:
+ case 0xAA:
+ case 0xAB:
+ case 0xAC:
+ case 0xAD:
+ case 0xAE:
+ case 0xAF:
+ case 0xB0:
+ case 0xB1:
+ case 0xB2:
+ case 0xB3:
+ case 0xB4:
+ case 0xB5:
+ case 0xB6:
+ case 0xB7:
+ case 0xB8:
+ case 0xB9:
+ case 0xBA:
+ case 0xBB:
+ case 0xBC:
+ case 0xBD:
+ case 0xBE:
+ case 0xBF:
+ {
+ return get_string(current & 0x1F);
+ }
+
+ case 0xD9: // str 8
{
return get_string(get_number<uint8_t>());
}
- case 0xda: // str 16
+ case 0xDA: // str 16
{
return get_string(get_number<uint16_t>());
}
- case 0xdb: // str 32
+ case 0xDB: // str 32
{
return get_string(get_number<uint32_t>());
}
@@ -5466,7 +5461,7 @@ class binary_reader
default:
{
std::stringstream ss;
- ss << std::setw(2) << std::setfill('0') << std::hex << current;
+ ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << current;
JSON_THROW(parse_error::create(113, chars_read,
"expected a MessagePack string; last byte: 0x" + ss.str()));
}
@@ -5562,15 +5557,15 @@ class binary_writer
{
case value_t::null:
{
- oa->write_character(static_cast<CharType>(0xf6));
+ oa->write_character(static_cast<CharType>(0xF6));
break;
}
case value_t::boolean:
{
oa->write_character(j.m_value.boolean
- ? static_cast<CharType>(0xf5)
- : static_cast<CharType>(0xf4));
+ ? static_cast<CharType>(0xF5)
+ : static_cast<CharType>(0xF4));
break;
}
@@ -5597,12 +5592,12 @@ class binary_writer
}
else if (j.m_value.number_integer <= (std::numeric_limits<uint32_t>::max)())
{
- oa->write_character(static_cast<CharType>(0x1a));
+ oa->write_character(static_cast<CharType>(0x1A));
write_number(static_cast<uint32_t>(j.m_value.number_integer));
}
else
{
- oa->write_character(static_cast<CharType>(0x1b));
+ oa->write_character(static_cast<CharType>(0x1B));
write_number(static_cast<uint64_t>(j.m_value.number_integer));
}
}
@@ -5627,12 +5622,12 @@ class binary_writer
}
else if (positive_number <= (std::numeric_limits<uint32_t>::max)())
{
- oa->write_character(static_cast<CharType>(0x3a));
+ oa->write_character(static_cast<CharType>(0x3A));
write_number(static_cast<uint32_t>(positive_number));
}
else
{
- oa->write_character(static_cast<CharType>(0x3b));
+ oa->write_character(static_cast<CharType>(0x3B));
write_number(static_cast<uint64_t>(positive_number));
}
}
@@ -5657,12 +5652,12 @@ class binary_writer
}
else if (j.m_value.number_unsigned <= (std::numeric_limits<uint32_t>::max)())
{
- oa->write_character(static_cast<CharType>(0x1a));
+ oa->write_character(static_cast<CharType>(0x1A));
write_number(static_cast<uint32_t>(j.m_value.number_unsigned));
}
else
{
- oa->write_character(static_cast<CharType>(0x1b));
+ oa->write_character(static_cast<CharType>(0x1B));
write_number(static_cast<uint64_t>(j.m_value.number_unsigned));
}
break;
@@ -5670,7 +5665,7 @@ class binary_writer
case value_t::number_float: // Double-Precision Float
{
- oa->write_character(static_cast<CharType>(0xfb));
+ oa->write_character(static_cast<CharType>(0xFB));
write_number(j.m_value.number_float);
break;
}
@@ -5683,25 +5678,25 @@ class binary_writer
{
write_number(static_cast<uint8_t>(0x60 + N));
}
- else if (N <= 0xff)
+ else if (N <= 0xFF)
{
oa->write_character(static_cast<CharType>(0x78));
write_number(static_cast<uint8_t>(N));
}
- else if (N <= 0xffff)
+ else if (N <= 0xFFFF)
{
oa->write_character(static_cast<CharType>(0x79));
write_number(static_cast<uint16_t>(N));
}
- else if (N <= 0xffffffff)
+ else if (N <= 0xFFFFFFFF)
{
- oa->write_character(static_cast<CharType>(0x7a));
+ oa->write_character(static_cast<CharType>(0x7A));
write_number(static_cast<uint32_t>(N));
}
// LCOV_EXCL_START
- else if (N <= 0xffffffffffffffff)
+ else if (N <= 0xFFFFFFFFFFFFFFFF)
{
- oa->write_character(static_cast<CharType>(0x7b));
+ oa->write_character(static_cast<CharType>(0x7B));
write_number(static_cast<uint64_t>(N));
}
// LCOV_EXCL_STOP
@@ -5721,25 +5716,25 @@ class binary_writer
{
write_number(static_cast<uint8_t>(0x80 + N));
}
- else if (N <= 0xff)
+ else if (N <= 0xFF)
{
oa->write_character(static_cast<CharType>(0x98));
write_number(static_cast<uint8_t>(N));
}
- else if (N <= 0xffff)
+ else if (N <= 0xFFFF)
{
oa->write_character(static_cast<CharType>(0x99));
write_number(static_cast<uint16_t>(N));
}
- else if (N <= 0xffffffff)
+ else if (N <= 0xFFFFFFFF)
{
- oa->write_character(static_cast<CharType>(0x9a));
+ oa->write_character(static_cast<CharType>(0x9A));
write_number(static_cast<uint32_t>(N));
}
// LCOV_EXCL_START
- else if (N <= 0xffffffffffffffff)
+ else if (N <= 0xFFFFFFFFFFFFFFFF)
{
- oa->write_character(static_cast<CharType>(0x9b));
+ oa->write_character(static_cast<CharType>(0x9B));
write_number(static_cast<uint64_t>(N));
}
// LCOV_EXCL_STOP
@@ -5758,27 +5753,27 @@ class binary_writer
const auto N = j.m_value.object->size();
if (N <= 0x17)
{
- write_number(static_cast<uint8_t>(0xa0 + N));
+ write_number(static_cast<uint8_t>(0xA0 + N));
}
- else if (N <= 0xff)
+ else if (N <= 0xFF)
{
- oa->write_character(static_cast<CharType>(0xb8));
+ oa->write_character(static_cast<CharType>(0xB8));
write_number(static_cast<uint8_t>(N));
}
- else if (N <= 0xffff)
+ else if (N <= 0xFFFF)
{
- oa->write_character(static_cast<CharType>(0xb9));
+ oa->write_character(static_cast<CharType>(0xB9));
write_number(static_cast<uint16_t>(N));
}
- else if (N <= 0xffffffff)
+ else if (N <= 0xFFFFFFFF)
{
- oa->write_character(static_cast<CharType>(0xba));
+ oa->write_character(static_cast<CharType>(0xBA));
write_number(static_cast<uint32_t>(N));
}
// LCOV_EXCL_START
- else if (N <= 0xffffffffffffffff)
+ else if (N <= 0xFFFFFFFFFFFFFFFF)
{
- oa->write_character(static_cast<CharType>(0xbb));
+ oa->write_character(static_cast<CharType>(0xBB));
write_number(static_cast<uint64_t>(N));
}
// LCOV_EXCL_STOP
@@ -5806,15 +5801,15 @@ class binary_writer
{
case value_t::null: // nil
{
- oa->write_character(static_cast<CharType>(0xc0));
+ oa->write_character(static_cast<CharType>(0xC0));
break;
}
case value_t::boolean: // true and false
{
oa->write_character(j.m_value.boolean
- ? static_cast<CharType>(0xc3)
- : static_cast<CharType>(0xc2));
+ ? static_cast<CharType>(0xC3)
+ : static_cast<CharType>(0xC2));
break;
}
@@ -5833,25 +5828,25 @@ class binary_writer
else if (j.m_value.number_unsigned <= (std::numeric_limits<uint8_t>::max)())
{
// uint 8
- oa->write_character(static_cast<CharType>(0xcc));
+ oa->write_character(static_cast<CharType>(0xCC));
write_number(static_cast<uint8_t>(j.m_value.number_integer));
}
else if (j.m_value.number_unsigned <= (std::numeric_limits<uint16_t>::max)())
{
// uint 16
- oa->write_character(static_cast<CharType>(0xcd));
+ oa->write_character(static_cast<CharType>(0xCD));
write_number(static_cast<uint16_t>(j.m_value.number_integer));
}
else if (j.m_value.number_unsigned <= (std::numeric_limits<uint32_t>::max)())
{
// uint 32
- oa->write_character(static_cast<CharType>(0xce));
+ oa->write_character(static_cast<CharType>(0xCE));
write_number(static_cast<uint32_t>(j.m_value.number_integer));
}
else if (j.m_value.number_unsigned <= (std::numeric_limits<uint64_t>::max)())
{
// uint 64
- oa->write_character(static_cast<CharType>(0xcf));
+ oa->write_character(static_cast<CharType>(0xCF));
write_number(static_cast<uint64_t>(j.m_value.number_integer));
}
}
@@ -5866,28 +5861,28 @@ class binary_writer
j.m_value.number_integer <= (std::numeric_limits<int8_t>::max)())
{
// int 8
- oa->write_character(static_cast<CharType>(0xd0));
+ oa->write_character(static_cast<CharType>(0xD0));
write_number(static_cast<int8_t>(j.m_value.number_integer));
}
else if (j.m_value.number_integer >= (std::numeric_limits<int16_t>::min)() and
j.m_value.number_integer <= (std::numeric_limits<int16_t>::max)())
{
// int 16
- oa->write_character(static_cast<CharType>(0xd1));
+ oa->write_character(static_cast<CharType>(0xD1));
write_number(static_cast<int16_t>(j.m_value.number_integer));
}
else if (j.m_value.number_integer >= (std::numeric_limits<int32_t>::min)() and
j.m_value.number_integer <= (std::numeric_limits<int32_t>::max)())
{
// int 32
- oa->write_character(static_cast<CharType>(0xd2));
+ oa->write_character(static_cast<CharType>(0xD2));
write_number(static_cast<int32_t>(j.m_value.number_integer));
}
else if (j.m_value.number_integer >= (std::numeric_limits<int64_t>::min)() and
j.m_value.number_integer <= (std::numeric_limits<int64_t>::max)())
{
// int 64
- oa->write_character(static_cast<CharType>(0xd3));
+ oa->write_character(static_cast<CharType>(0xD3));
write_number(static_cast<int64_t>(j.m_value.number_integer));
}
}
@@ -5904,25 +5899,25 @@ class binary_writer
else if (j.m_value.number_unsigned <= (std::numeric_limits<uint8_t>::max)())
{
// uint 8
- oa->write_character(static_cast<CharType>(0xcc));
+ oa->write_character(static_cast<CharType>(0xCC));
write_number(static_cast<uint8_t>(j.m_value.number_integer));
}
else if (j.m_value.number_unsigned <= (std::numeric_limits<uint16_t>::max)())
{
// uint 16
- oa->write_character(static_cast<CharType>(0xcd));
+ oa->write_character(static_cast<CharType>(0xCD));
write_number(static_cast<uint16_t>(j.m_value.number_integer));
}
else if (j.m_value.number_unsigned <= (std::numeric_limits<uint32_t>::max)())
{
// uint 32
- oa->write_character(static_cast<CharType>(0xce));
+ oa->write_character(static_cast<CharType>(0xCE));
write_number(static_cast<uint32_t>(j.m_value.number_integer));
}
else if (j.m_value.number_unsigned <= (std::numeric_limits<uint64_t>::max)())
{
// uint 64
- oa->write_character(static_cast<CharType>(0xcf));
+ oa->write_character(static_cast<CharType>(0xCF));
write_number(static_cast<uint64_t>(j.m_value.number_integer));
}
break;
@@ -5930,7 +5925,7 @@ class binary_writer
case value_t::number_float: // float 64
{
- oa->write_character(static_cast<CharType>(0xcb));
+ oa->write_character(static_cast<CharType>(0xCB));
write_number(j.m_value.number_float);
break;
}
@@ -5942,24 +5937,24 @@ class binary_writer
if (N <= 31)
{
// fixstr
- write_number(static_cast<uint8_t>(0xa0 | N));
+ write_number(static_cast<uint8_t>(0xA0 | N));
}
else if (N <= 255)
{
// str 8
- oa->write_character(static_cast<CharType>(0xd9));
+ oa->write_character(static_cast<CharType>(0xD9));
write_number(static_cast<uint8_t>(N));
}
else if (N <= 65535)
{
// str 16
- oa->write_character(static_cast<CharType>(0xda));
+ oa->write_character(static_cast<CharType>(0xDA));
write_number(static_cast<uint16_t>(N));
}
else if (N <= 4294967295)
{
// str 32
- oa->write_character(static_cast<CharType>(0xdb));
+ oa->write_character(static_cast<CharType>(0xDB));
write_number(static_cast<uint32_t>(N));
}
@@ -5979,16 +5974,16 @@ class binary_writer
// fixarray
write_number(static_cast<uint8_t>(0x90 | N));
}
- else if (N <= 0xffff)
+ else if (N <= 0xFFFF)
{
// array 16
- oa->write_character(static_cast<CharType>(0xdc));
+ oa->write_character(static_cast<CharType>(0xDC));
write_number(static_cast<uint16_t>(N));
}
- else if (N <= 0xffffffff)
+ else if (N <= 0xFFFFFFFF)
{
// array 32
- oa->write_character(static_cast<CharType>(0xdd));
+ oa->write_character(static_cast<CharType>(0xDD));
write_number(static_cast<uint32_t>(N));
}
@@ -6007,18 +6002,18 @@ class binary_writer
if (N <= 15)
{
// fixmap
- write_number(static_cast<uint8_t>(0x80 | (N & 0xf)));
+ write_number(static_cast<uint8_t>(0x80 | (N & 0xF)));
}
else if (N <= 65535)
{
// map 16
- oa->write_character(static_cast<CharType>(0xde));
+ oa->write_character(static_cast<CharType>(0xDE));
write_number(static_cast<uint16_t>(N));
}
else if (N <= 4294967295)
{
// map 32
- oa->write_character(static_cast<CharType>(0xdf));
+ oa->write_character(static_cast<CharType>(0xDF));
write_number(static_cast<uint32_t>(N));
}
@@ -6363,9 +6358,9 @@ class serializer
case 0x05:
case 0x06:
case 0x07:
- case 0x0b:
- case 0x0e:
- case 0x0f:
+ case 0x0B:
+ case 0x0E:
+ case 0x0F:
case 0x10:
case 0x11:
case 0x12:
@@ -6376,12 +6371,12 @@ class serializer
case 0x17:
case 0x18:
case 0x19:
- case 0x1a:
- case 0x1b:
- case 0x1c:
- case 0x1d:
- case 0x1e:
- case 0x1f:
+ case 0x1A:
+ case 0x1B:
+ case 0x1C:
+ case 0x1D:
+ case 0x1E:
+ case 0x1F:
{
// from c (1 byte) to \uxxxx (6 bytes)
res += 5;
@@ -6393,12 +6388,8 @@ class serializer
if (ensure_ascii and (s[i] & 0x80 or s[i] == 0x7F))
{
const auto bytes = bytes_following(static_cast<uint8_t>(s[i]));
- if (bytes == std::string::npos)
- {
- // invalid characters are treated as is, so no
- // additional space will be used
- break;
- }
+ // invalid characters will be detected by throw_if_invalid_utf8
+ assert (bytes != std::string::npos);
if (bytes == 3)
{
@@ -6492,6 +6483,8 @@ class serializer
*/
void dump_escaped(const string_t& s, const bool ensure_ascii) const
{
+ throw_if_invalid_utf8(s);
+
const auto space = extra_space(s, ensure_ascii);
if (space == 0)
{
@@ -6514,7 +6507,7 @@ class serializer
break;
}
- case '\\': // reverse solidus (0x5c)
+ case '\\': // reverse solidus (0x5C)
{
// nothing to change
pos += 2;
@@ -6528,21 +6521,21 @@ class serializer
break;
}
- case '\f': // formfeed (0x0c)
+ case '\f': // formfeed (0x0C)
{
result[pos + 1] = 'f';
pos += 2;
break;
}
- case '\n': // newline (0x0a)
+ case '\n': // newline (0x0A)
{
result[pos + 1] = 'n';
pos += 2;
break;
}
- case '\r': // carriage return (0x0d)
+ case '\r': // carriage return (0x0D)
{
result[pos + 1] = 'r';
pos += 2;
@@ -6564,21 +6557,18 @@ class serializer
(ensure_ascii and (s[i] & 0x80 or s[i] == 0x7F)))
{
const auto bytes = bytes_following(static_cast<uint8_t>(s[i]));
- if (bytes == std::string::npos)
- {
- // copy invalid character as is
- result[pos++] = s[i];
- break;
- }
+ // invalid characters will be detected by throw_if_invalid_utf8
+ assert (bytes != std::string::npos);
// check that the additional bytes are present
assert(i + bytes < s.size());
- // to use \uxxxx escaping, we first need to caluclate
+ // to use \uxxxx escaping, we first need to calculate
// the codepoint from the UTF-8 bytes
int codepoint = 0;
- assert(0 <= bytes and bytes <= 3);
+ // bytes is unsigned type:
+ assert(bytes <= 3);
switch (bytes)
{
case 0:
@@ -6641,11 +6631,10 @@ class serializer
@param[in] x integer number (signed or unsigned) to dump
@tparam NumberType either @a number_integer_t or @a number_unsigned_t
*/
- template <
- typename NumberType,
- detail::enable_if_t<std::is_same<NumberType, number_unsigned_t>::value or
- std::is_same<NumberType, number_integer_t>::value,
- int> = 0 >
+ template<typename NumberType, detail::enable_if_t<
+ std::is_same<NumberType, number_unsigned_t>::value or
+ std::is_same<NumberType, number_integer_t>::value,
+ int> = 0>
void dump_integer(NumberType x)
{
// special case for "0"
@@ -6743,6 +6732,87 @@ class serializer
}
}
+ /*!
+ @brief check whether a string is UTF-8 encoded
+
+ The function checks each byte of a string whether it is UTF-8 encoded. The
+ result of the check is stored in the @a state parameter. The function must
+ be called initially with state 0 (accept). State 1 means the string must
+ be rejected, because the current byte is not allowed. If the string is
+ completely processed, but the state is non-zero, the string ended
+ prematurely; that is, the last byte indicated more bytes should have
+ followed.
+
+ @param[in,out] state the state of the decoding
+ @param[in] byte next byte to decode
+
+ @note The function has been edited: a std::array is used and the code
+ point is not calculated.
+
+ @copyright Copyright (c) 2008-2009 Bjoern Hoehrmann <bjoern@hoehrmann.de>
+ @sa http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
+ */
+ static void decode(uint8_t& state, const uint8_t byte)
+ {
+ static const std::array<uint8_t, 400> utf8d =
+ {
+ {
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 00..1F
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 20..3F
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 40..5F
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 60..7F
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, // 80..9F
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, // A0..BF
+ 8, 8, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, // C0..DF
+ 0xA, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x4, 0x3, 0x3, // E0..EF
+ 0xB, 0x6, 0x6, 0x6, 0x5, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, // F0..FF
+ 0x0, 0x1, 0x2, 0x3, 0x5, 0x8, 0x7, 0x1, 0x1, 0x1, 0x4, 0x6, 0x1, 0x1, 0x1, 0x1, // s0..s0
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, // s1..s2
+ 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, // s3..s4
+ 1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, // s5..s6
+ 1, 3, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 // s7..s8
+ }
+ };
+
+ const uint8_t type = utf8d[byte];
+ state = utf8d[256u + state * 16u + type];
+ }
+
+ /*!
+ @brief throw an exception if a string is not UTF-8 encoded
+
+ @param[in] str UTF-8 string to check
+ @throw type_error.316 if passed string is not UTF-8 encoded
+
+ @since version 3.0.0
+ */
+ static void throw_if_invalid_utf8(const std::string& str)
+ {
+ // start with state 0 (= accept)
+ uint8_t state = 0;
+
+ for (size_t i = 0; i < str.size(); ++i)
+ {
+ const auto byte = static_cast<uint8_t>(str[i]);
+ decode(state, byte);
+ if (state == 1)
+ {
+ // state 1 means reject
+ std::stringstream ss;
+ ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << static_cast<int>(byte);
+ JSON_THROW(type_error::create(316, "invalid UTF-8 byte at index " + std::to_string(i) + ": 0x" + ss.str()));
+ }
+ }
+
+ if (state != 0)
+ {
+ // we finish reading, but do not accept: string was incomplete
+ std::stringstream ss;
+ ss << std::setw(2) << std::uppercase << std::setfill('0') << std::hex << static_cast<int>(static_cast<uint8_t>(str.back()));
+ JSON_THROW(type_error::create(316, "incomplete UTF-8 string; last byte: 0x" + ss.str()));
+ }
+ }
+
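Illustrative sketch (assumes the header is reachable as <nlohmann/json.hpp>; not from the patch itself) of how the new throw_if_invalid_utf8 check surfaces through the public API: dump() now rejects strings that are not valid UTF-8 with type_error.316.

    #include <nlohmann/json.hpp>
    #include <iostream>

    int main()
    {
        // "\xC0" is not a valid continuation byte, so this string is not UTF-8
        nlohmann::json j = "\xF0\xA4\xAD\xC0";
        try {
            j.dump();  // the serializer runs throw_if_invalid_utf8 on every string
        } catch (nlohmann::json::type_error& e) {
            std::cout << e.what() << '\n';  // reports error 316 and the offending byte
        }
    }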
private:
/// the output of the serializer
output_adapter_t<char> o = nullptr;
@@ -6771,27 +6841,20 @@ class json_ref
using value_type = BasicJsonType;
json_ref(value_type&& value)
- : owned_value(std::move(value)),
- value_ref(&owned_value),
- is_rvalue(true)
+ : owned_value(std::move(value)), value_ref(&owned_value), is_rvalue(true)
{}
json_ref(const value_type& value)
- : value_ref(const_cast<value_type*>(&value)),
- is_rvalue(false)
+ : value_ref(const_cast<value_type*>(&value)), is_rvalue(false)
{}
json_ref(std::initializer_list<json_ref> init)
- : owned_value(init),
- value_ref(&owned_value),
- is_rvalue(true)
+ : owned_value(init), value_ref(&owned_value), is_rvalue(true)
{}
- template <class... Args>
+ template<class... Args>
json_ref(Args&& ... args)
- : owned_value(std::forward<Args>(args)...),
- value_ref(&owned_value),
- is_rvalue(true)
+ : owned_value(std::forward<Args>(args)...), value_ref(&owned_value), is_rvalue(true)
{}
// class should be movable only
@@ -6949,6 +7012,27 @@ class json_pointer
return to_string();
}
+ /*!
+ @param[in] s reference token to be converted into an array index
+
+ @return integer representation of @a s
+
+ @throw out_of_range.404 if string @a s could not be converted to an integer
+ */
+ static int array_index(const std::string& s)
+ {
+ size_t processed_chars = 0;
+ const int res = std::stoi(s, &processed_chars);
+
+ // check if the string was completely read
+ if (JSON_UNLIKELY(processed_chars != s.size()))
+ {
+ JSON_THROW(detail::out_of_range::create(404, "unresolved reference token '" + s + "'"));
+ }
+
+ return res;
+ }
+
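Minimal sketch (assuming <nlohmann/json.hpp>) of the stricter token handling: a reference token that std::stoi cannot consume completely is now rejected with out_of_range.404 instead of being silently truncated.

    #include <nlohmann/json.hpp>
    #include <iostream>

    int main()
    {
        nlohmann::json j = {{"arr", {10, 20, 30}}};
        std::cout << j.at("/arr/1"_json_pointer) << '\n';  // prints 20
        try {
            j.at("/arr/1x"_json_pointer);  // "1x" is not consumed completely by std::stoi
        } catch (nlohmann::json::out_of_range& e) {
            std::cout << e.what() << '\n';  // unresolved reference token '1x' (404)
        }
    }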
private:
/*!
@brief remove and return last reference pointer
@@ -6984,7 +7068,6 @@ class json_pointer
return result;
}
-
/*!
@brief create and return a reference to the pointed to value
@@ -7320,11 +7403,11 @@ class basic_json
public:
using value_t = detail::value_t;
- // forward declarations
+ /// @copydoc nlohmann::json_pointer
using json_pointer = ::nlohmann::json_pointer;
template<typename T, typename SFINAE>
using json_serializer = JSONSerializer<T, SFINAE>;
-
+ /// helper type for initializer lists of basic_json values
using initializer_list_t = std::initializer_list<detail::json_ref<basic_json>>;
////////////////
@@ -7436,7 +7519,7 @@ class basic_json
result["url"] = "https://github.com/nlohmann/json";
result["version"] =
{
- {"string", "2.1.1"}, {"major", 2}, {"minor", 1}, {"patch", 1}
+ {"string", "3.0.1"}, {"major", 3}, {"minor", 0}, {"patch", 1}
};
#ifdef _WIN32
@@ -7489,6 +7572,14 @@ class basic_json
/// the template arguments passed to class @ref basic_json.
/// @{
+#if defined(JSON_HAS_CPP_14)
+ // Use transparent comparator if possible, combined with perfect forwarding
+ // on find() and count() calls prevents unnecessary string construction.
+ using object_comparator_t = std::less<>;
+#else
+ using object_comparator_t = std::less<StringType>;
+#endif
+
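Standalone sketch of why the transparent comparator helps (plain standard library, C++14): with std::less<>, find() accepts the lookup key directly and no temporary std::string is constructed.

    #include <map>
    #include <string>

    int main()
    {
        std::map<std::string, int, std::less<>> m{{"answer", 42}};
        // heterogeneous lookup: the const char* literal is compared directly,
        // no temporary std::string key is materialized
        auto it = m.find("answer");
        return it != m.end() ? 0 : 1;
    }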
/*!
@brief a type for an object
@@ -7572,14 +7663,6 @@ class basic_json
7159](http://rfc7159.net/rfc7159), because any order implements the
specified "unordered" nature of JSON objects.
*/
-
-#if defined(JSON_HAS_CPP_14)
- // Use transparent comparator if possible, combined with perfect forwarding
- // on find() and count() calls prevents unnecessary string construction.
- using object_comparator_t = std::less<>;
-#else
- using object_comparator_t = std::less<StringType>;
-#endif
using object_t = ObjectType<StringType,
basic_json,
object_comparator_t,
@@ -7931,12 +8014,14 @@ class basic_json
static T* create(Args&& ... args)
{
AllocatorType<T> alloc;
+ using AllocatorTraits = std::allocator_traits<AllocatorType<T>>;
+
auto deleter = [&](T * object)
{
- alloc.deallocate(object, 1);
+ AllocatorTraits::deallocate(alloc, object, 1);
};
- std::unique_ptr<T, decltype(deleter)> object(alloc.allocate(1), deleter);
- alloc.construct(object.get(), std::forward<Args>(args)...);
+ std::unique_ptr<T, decltype(deleter)> object(AllocatorTraits::allocate(alloc, 1), deleter);
+ AllocatorTraits::construct(alloc, object.get(), std::forward<Args>(args)...);
assert(object != nullptr);
return object.release();
}
@@ -8054,7 +8139,7 @@ class basic_json
object = nullptr; // silence warning, see #821
if (JSON_UNLIKELY(t == value_t::null))
{
- JSON_THROW(other_error::create(500, "961c151d2e87f2686a955a9be24d316f1362bf21 2.1.1")); // LCOV_EXCL_LINE
+ JSON_THROW(other_error::create(500, "961c151d2e87f2686a955a9be24d316f1362bf21 3.0.1")); // LCOV_EXCL_LINE
}
break;
}
@@ -8104,24 +8189,24 @@ class basic_json
case value_t::object:
{
AllocatorType<object_t> alloc;
- alloc.destroy(object);
- alloc.deallocate(object, 1);
+ std::allocator_traits<decltype(alloc)>::destroy(alloc, object);
+ std::allocator_traits<decltype(alloc)>::deallocate(alloc, object, 1);
break;
}
case value_t::array:
{
AllocatorType<array_t> alloc;
- alloc.destroy(array);
- alloc.deallocate(array, 1);
+ std::allocator_traits<decltype(alloc)>::destroy(alloc, array);
+ std::allocator_traits<decltype(alloc)>::deallocate(alloc, array, 1);
break;
}
case value_t::string:
{
AllocatorType<string_t> alloc;
- alloc.destroy(string);
- alloc.deallocate(string, 1);
+ std::allocator_traits<decltype(alloc)>::destroy(alloc, string);
+ std::allocator_traits<decltype(alloc)>::deallocate(alloc, string, 1);
break;
}
@@ -8154,6 +8239,21 @@ class basic_json
// JSON parser callback //
//////////////////////////
+ /*!
+ @brief parser event types
+
+ The parser callback distinguishes the following events:
+ - `object_start`: the parser read `{` and started to process a JSON object
+ - `key`: the parser read a key of a value in an object
+ - `object_end`: the parser read `}` and finished processing a JSON object
+ - `array_start`: the parser read `[` and started to process a JSON array
+ - `array_end`: the parser read `]` and finished processing a JSON array
+ - `value`: the parser finished reading a JSON value
+
+ @image html callback_events.png "Example when certain parse events are triggered"
+
+ @sa @ref parser_callback_t for more information and examples
+ */
using parse_event_t = typename parser::parse_event_t;
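Hedged sketch (assuming <nlohmann/json.hpp>) of a callback reacting to these events; the lambda follows the json::parser_callback_t signature (depth, event, parsed value):

    #include <nlohmann/json.hpp>
    #include <iostream>

    int main()
    {
        nlohmann::json::parser_callback_t cb =
            [](int depth, nlohmann::json::parse_event_t event, nlohmann::json& parsed)
        {
            if (event == nlohmann::json::parse_event_t::key)
                std::cout << "key at depth " << depth << ": " << parsed << '\n';
            return true;  // keep every element
        };

        nlohmann::json::parse(R"({"a": {"b": 1}})", cb);
    }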
/*!
@@ -8280,7 +8380,7 @@ class basic_json
@brief create a JSON value
This is a "catch all" constructor for all compatible JSON types; that is,
- types for which a `to_json()` method exsits. The constructor forwards the
+ types for which a `to_json()` method exists. The constructor forwards the
parameter @a val to that method (to `json_serializer<U>::to_json` method
with `U = uncvref_t<CompatibleType>`, to be exact).
@@ -8952,11 +9052,14 @@ class basic_json
@param[in] indent_char The character to use for indentation if @a indent is
greater than `0`. The default is ` ` (space).
@param[in] ensure_ascii If @a ensure_ascii is true, all non-ASCII characters
- in the output are escaped with \uXXXX sequences, and the result consists
+ in the output are escaped with `\uXXXX` sequences, and the result consists
of ASCII characters only.
@return string containing the serialization of the JSON value
+ @throw type_error.316 if a string stored inside the JSON value is not
+ UTF-8 encoded
+
@complexity Linear.
@exceptionsafety Strong guarantee: if an exception is thrown, there are no
@@ -8968,8 +9071,8 @@ class basic_json
@see https://docs.python.org/2/library/json.html#json.dump
- @since version 1.0.0; indentation character @a indent_char and option
- @a ensure_ascii added in version 3.0.0
+ @since version 1.0.0; indentation character @a indent_char, option
+ @a ensure_ascii and exceptions added in version 3.0.0
*/
string_t dump(const int indent = -1, const char indent_char = ' ',
const bool ensure_ascii = false) const
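Short usage sketch of the extended signature (assuming <nlohmann/json.hpp>): with ensure_ascii set, non-ASCII characters are emitted as \uXXXX escapes.

    #include <nlohmann/json.hpp>
    #include <iostream>

    int main()
    {
        nlohmann::json j = "caf\xC3\xA9";            // UTF-8 encoded "café"
        std::cout << j.dump() << '\n';               // "café"
        std::cout << j.dump(-1, ' ', true) << '\n';  // "caf\u00e9"
    }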
@@ -9003,7 +9106,7 @@ class basic_json
string | value_t::string
number (integer) | value_t::number_integer
number (unsigned integer) | value_t::number_unsigned
- number (foating-point) | value_t::number_float
+ number (floating-point) | value_t::number_float
object | value_t::object
array | value_t::array
discarded | value_t::discarded
@@ -9507,11 +9610,9 @@ class basic_json
@since version 2.1.0
*/
- template <
- typename BasicJsonType,
- detail::enable_if_t<std::is_same<typename std::remove_const<BasicJsonType>::type,
- basic_json_t>::value,
- int> = 0 >
+ template<typename BasicJsonType, detail::enable_if_t<
+ std::is_same<typename std::remove_const<BasicJsonType>::type, basic_json_t>::value,
+ int> = 0>
basic_json get() const
{
return *this;
@@ -9556,14 +9657,12 @@ class basic_json
@since version 2.1.0
*/
- template <
- typename ValueTypeCV,
- typename ValueType = detail::uncvref_t<ValueTypeCV>,
- detail::enable_if_t <
- not std::is_same<basic_json_t, ValueType>::value and
- detail::has_from_json<basic_json_t, ValueType>::value and
- not detail::has_non_default_from_json<basic_json_t, ValueType>::value,
- int > = 0 >
+ template<typename ValueTypeCV, typename ValueType = detail::uncvref_t<ValueTypeCV>,
+ detail::enable_if_t <
+ not std::is_same<basic_json_t, ValueType>::value and
+ detail::has_from_json<basic_json_t, ValueType>::value and
+ not detail::has_non_default_from_json<basic_json_t, ValueType>::value,
+ int> = 0>
ValueType get() const noexcept(noexcept(
JSONSerializer<ValueType>::from_json(std::declval<const basic_json_t&>(), std::declval<ValueType&>())))
{
@@ -9611,12 +9710,10 @@ class basic_json
@since version 2.1.0
*/
- template <
- typename ValueTypeCV,
- typename ValueType = detail::uncvref_t<ValueTypeCV>,
- detail::enable_if_t<not std::is_same<basic_json_t, ValueType>::value and
- detail::has_non_default_from_json<basic_json_t,
- ValueType>::value, int> = 0 >
+ template<typename ValueTypeCV, typename ValueType = detail::uncvref_t<ValueTypeCV>,
+ detail::enable_if_t<not std::is_same<basic_json_t, ValueType>::value and
+ detail::has_non_default_from_json<basic_json_t, ValueType>::value,
+ int> = 0>
ValueType get() const noexcept(noexcept(
JSONSerializer<ValueTypeCV>::from_json(std::declval<const basic_json_t&>())))
{
@@ -10110,7 +10207,7 @@ class basic_json
@return const reference to the element at index @a idx
- @throw type_error.305 if the JSON value is not an array; in that cases,
+ @throw type_error.305 if the JSON value is not an array; in that case,
using the [] operator with an index makes no sense.
@complexity Constant.
@@ -10193,7 +10290,7 @@ class basic_json
@pre The element with key @a key must exist. **This precondition is
enforced with an assertion.**
- @throw type_error.305 if the JSON value is not an object; in that cases,
+ @throw type_error.305 if the JSON value is not an object; in that case,
using the [] operator with a key makes no sense.
@complexity Logarithmic in the size of the container.
@@ -10282,7 +10379,7 @@ class basic_json
@pre The element with key @a key must exist. **This precondition is
enforced with an assertion.**
- @throw type_error.305 if the JSON value is not an object; in that cases,
+ @throw type_error.305 if the JSON value is not an object; in that case,
using the [] operator with a key makes no sense.
@complexity Logarithmic in the size of the container.
@@ -10342,7 +10439,7 @@ class basic_json
@return copy of the element at key @a key or @a default_value if @a key
is not found
- @throw type_error.306 if the JSON value is not an objec; in that cases,
+ @throw type_error.306 if the JSON value is not an object; in that case,
using `value()` with a key makes no sense.
@complexity Logarithmic in the size of the container.
@@ -10415,7 +10512,7 @@ class basic_json
@return copy of the element at key @a key or @a default_value if @a key
is not found
- @throw type_error.306 if the JSON value is not an objec; in that cases,
+ @throw type_error.306 if the JSON value is not an object; in that case,
using `value()` with a key makes no sense.
@complexity Logarithmic in the size of the container.
@@ -10619,8 +10716,8 @@ class basic_json
if (is_string())
{
AllocatorType<string_t> alloc;
- alloc.destroy(m_value.string);
- alloc.deallocate(m_value.string, 1);
+ std::allocator_traits<decltype(alloc)>::destroy(alloc, m_value.string);
+ std::allocator_traits<decltype(alloc)>::deallocate(alloc, m_value.string, 1);
m_value.string = nullptr;
}
@@ -10725,8 +10822,8 @@ class basic_json
if (is_string())
{
AllocatorType<string_t> alloc;
- alloc.destroy(m_value.string);
- alloc.deallocate(m_value.string, 1);
+ std::allocator_traits<decltype(alloc)>::destroy(alloc, m_value.string);
+ std::allocator_traits<decltype(alloc)>::deallocate(alloc, m_value.string, 1);
m_value.string = nullptr;
}
@@ -11220,22 +11317,62 @@ class basic_json
reference to the JSON values is returned, so there is no access to the
underlying iterator.
+ For loop without iterator_wrapper:
+
+ @code{cpp}
+ for (auto it = j_object.begin(); it != j_object.end(); ++it)
+ {
+ std::cout << "key: " << it.key() << ", value:" << it.value() << '\n';
+ }
+ @endcode
+
+ Range-based for loop without iterator proxy:
+
+ @code{cpp}
+ for (auto it : j_object)
+ {
+ // "it" is of type json::reference and has no key() member
+ std::cout << "value: " << it << '\n';
+ }
+ @endcode
+
+ Range-based for loop with iterator proxy:
+
+ @code{cpp}
+ for (auto it : json::iterator_wrapper(j_object))
+ {
+ std::cout << "key: " << it.key() << ", value:" << it.value() << '\n';
+ }
+ @endcode
+
+ @note When iterating over an array, `key()` will return the index of the
+ element as string (see example).
+
+ @param[in] ref reference to a JSON value
+ @return iteration proxy object wrapping @a ref with an interface to use in
+ range-based for loops
+
@liveexample{The following code shows how the wrapper is used,iterator_wrapper}
+ @exceptionsafety Strong guarantee: if an exception is thrown, there are no
+ changes in the JSON value.
+
+ @complexity Constant.
+
@note The name of this function is not yet final and may change in the
future.
*/
- static iteration_proxy<iterator> iterator_wrapper(reference cont)
+ static iteration_proxy<iterator> iterator_wrapper(reference ref)
{
- return iteration_proxy<iterator>(cont);
+ return iteration_proxy<iterator>(ref);
}
/*!
@copydoc iterator_wrapper(reference)
*/
- static iteration_proxy<const_iterator> iterator_wrapper(const_reference cont)
+ static iteration_proxy<const_iterator> iterator_wrapper(const_reference ref)
{
- return iteration_proxy<const_iterator>(cont);
+ return iteration_proxy<const_iterator>(ref);
}
/// @}
@@ -12120,7 +12257,7 @@ class basic_json
JSON_THROW(type_error::create(312, "cannot use update() with " + std::string(j.type_name())));
}
- for (auto it = j.begin(); it != j.end(); ++it)
+ for (auto it = j.cbegin(); it != j.cend(); ++it)
{
m_value.object->operator[](it.key()) = it.value();
}
@@ -12341,7 +12478,7 @@ class basic_json
[comparison function](https://github.com/mariokonrad/marnav/blob/master/src/marnav/math/floatingpoint.hpp#L34-#L39)
could be used, for instance
@code {.cpp}
- template <typename T, typename = typename std::enable_if<std::is_floating_point<T>::value, T>::type>
+ template<typename T, typename = typename std::enable_if<std::is_floating_point<T>::value, T>::type>
inline bool is_same(T a, T b, T epsilon = std::numeric_limits<T>::epsilon()) noexcept
{
return std::abs(a - b) <= epsilon;
@@ -12769,7 +12906,7 @@ class basic_json
`std::setw(4)` on @a o sets the indentation level to `4` and the
serialization result is the same as calling `dump(4)`.
- - The indentation characrer can be controlled with the member variable
+ - The indentation character can be controlled with the member variable
`fill` of the output stream @a o. For instance, the manipulator
`std::setfill('\\t')` sets indentation to use a tab character rather than
the default space character.
@@ -12779,12 +12916,15 @@ class basic_json
@return the stream @a o
+ @throw type_error.316 if a string stored inside the JSON value is not
+ UTF-8 encoded
+
@complexity Linear.
@liveexample{The example below shows the serialization with different
parameters to `width` to adjust the indentation level.,operator_serialize}
- @since version 1.0.0; indentaction character added in version 3.0.0
+ @since version 1.0.0; indentation character added in version 3.0.0
*/
friend std::ostream& operator<<(std::ostream& o, const basic_json& j)
{
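Small sketch of the manipulators described above (assuming <nlohmann/json.hpp>): std::setw controls the indent level and the stream's fill character controls the indent character.

    #include <nlohmann/json.hpp>
    #include <iomanip>
    #include <iostream>

    int main()
    {
        const nlohmann::json j = {{"pi", 3.141}, {"list", {1, 2, 3}}};
        std::cout << j << '\n';                                // compact, like dump()
        std::cout << std::setw(4) << j << '\n';                // pretty-printed, like dump(4)
        std::cout << std::setw(1) << std::setfill('\t') << j;  // one tab per nesting level
    }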
@@ -13124,40 +13264,40 @@ class basic_json
JSON value type | value/range | CBOR type | first byte
--------------- | ------------------------------------------ | ---------------------------------- | ---------------
- null | `null` | Null | 0xf6
- boolean | `true` | True | 0xf5
- boolean | `false` | False | 0xf4
- number_integer | -9223372036854775808..-2147483649 | Negative integer (8 bytes follow) | 0x3b
- number_integer | -2147483648..-32769 | Negative integer (4 bytes follow) | 0x3a
+ null | `null` | Null | 0xF6
+ boolean | `true` | True | 0xF5
+ boolean | `false` | False | 0xF4
+ number_integer | -9223372036854775808..-2147483649 | Negative integer (8 bytes follow) | 0x3B
+ number_integer | -2147483648..-32769 | Negative integer (4 bytes follow) | 0x3A
number_integer | -32768..-129 | Negative integer (2 bytes follow) | 0x39
number_integer | -128..-25 | Negative integer (1 byte follow) | 0x38
number_integer | -24..-1 | Negative integer | 0x20..0x37
number_integer | 0..23 | Integer | 0x00..0x17
number_integer | 24..255 | Unsigned integer (1 byte follow) | 0x18
number_integer | 256..65535 | Unsigned integer (2 bytes follow) | 0x19
- number_integer | 65536..4294967295 | Unsigned integer (4 bytes follow) | 0x1a
- number_integer | 4294967296..18446744073709551615 | Unsigned integer (8 bytes follow) | 0x1b
+ number_integer | 65536..4294967295 | Unsigned integer (4 bytes follow) | 0x1A
+ number_integer | 4294967296..18446744073709551615 | Unsigned integer (8 bytes follow) | 0x1B
number_unsigned | 0..23 | Integer | 0x00..0x17
number_unsigned | 24..255 | Unsigned integer (1 byte follow) | 0x18
number_unsigned | 256..65535 | Unsigned integer (2 bytes follow) | 0x19
- number_unsigned | 65536..4294967295 | Unsigned integer (4 bytes follow) | 0x1a
- number_unsigned | 4294967296..18446744073709551615 | Unsigned integer (8 bytes follow) | 0x1b
- number_float | *any value* | Double-Precision Float | 0xfb
+ number_unsigned | 65536..4294967295 | Unsigned integer (4 bytes follow) | 0x1A
+ number_unsigned | 4294967296..18446744073709551615 | Unsigned integer (8 bytes follow) | 0x1B
+ number_float | *any value* | Double-Precision Float | 0xFB
string | *length*: 0..23 | UTF-8 string | 0x60..0x77
string | *length*: 23..255 | UTF-8 string (1 byte follow) | 0x78
string | *length*: 256..65535 | UTF-8 string (2 bytes follow) | 0x79
- string | *length*: 65536..4294967295 | UTF-8 string (4 bytes follow) | 0x7a
- string | *length*: 4294967296..18446744073709551615 | UTF-8 string (8 bytes follow) | 0x7b
+ string | *length*: 65536..4294967295 | UTF-8 string (4 bytes follow) | 0x7A
+ string | *length*: 4294967296..18446744073709551615 | UTF-8 string (8 bytes follow) | 0x7B
array | *size*: 0..23 | array | 0x80..0x97
array | *size*: 23..255 | array (1 byte follow) | 0x98
array | *size*: 256..65535 | array (2 bytes follow) | 0x99
- array | *size*: 65536..4294967295 | array (4 bytes follow) | 0x9a
- array | *size*: 4294967296..18446744073709551615 | array (8 bytes follow) | 0x9b
- object | *size*: 0..23 | map | 0xa0..0xb7
- object | *size*: 23..255 | map (1 byte follow) | 0xb8
- object | *size*: 256..65535 | map (2 bytes follow) | 0xb9
- object | *size*: 65536..4294967295 | map (4 bytes follow) | 0xba
- object | *size*: 4294967296..18446744073709551615 | map (8 bytes follow) | 0xbb
+ array | *size*: 65536..4294967295 | array (4 bytes follow) | 0x9A
+ array | *size*: 4294967296..18446744073709551615 | array (8 bytes follow) | 0x9B
+ object | *size*: 0..23 | map | 0xA0..0xB7
+ object | *size*: 23..255 | map (1 byte follow) | 0xB8
+ object | *size*: 256..65535 | map (2 bytes follow) | 0xB9
+ object | *size*: 65536..4294967295 | map (4 bytes follow) | 0xBA
+ object | *size*: 4294967296..18446744073709551615 | map (8 bytes follow) | 0xBB
@note The mapping is **complete** in the sense that any JSON value type
can be converted to a CBOR value.
@@ -13167,20 +13307,20 @@ class basic_json
function which serializes NaN or Infinity to `null`.
@note The following CBOR types are not used in the conversion:
- - byte strings (0x40..0x5f)
- - UTF-8 strings terminated by "break" (0x7f)
- - arrays terminated by "break" (0x9f)
- - maps terminated by "break" (0xbf)
- - date/time (0xc0..0xc1)
- - bignum (0xc2..0xc3)
- - decimal fraction (0xc4)
- - bigfloat (0xc5)
- - tagged items (0xc6..0xd4, 0xd8..0xdb)
- - expected conversions (0xd5..0xd7)
- - simple values (0xe0..0xf3, 0xf8)
- - undefined (0xf7)
- - half and single-precision floats (0xf9-0xfa)
- - break (0xff)
+ - byte strings (0x40..0x5F)
+ - UTF-8 strings terminated by "break" (0x7F)
+ - arrays terminated by "break" (0x9F)
+ - maps terminated by "break" (0xBF)
+ - date/time (0xC0..0xC1)
+ - bignum (0xC2..0xC3)
+ - decimal fraction (0xC4)
+ - bigfloat (0xC5)
+ - tagged items (0xC6..0xD4, 0xD8..0xDB)
+ - expected conversions (0xD5..0xD7)
+ - simple values (0xE0..0xF3, 0xF8)
+ - undefined (0xF7)
+ - half and single-precision floats (0xF9..0xFA)
+ - break (0xFF)
@param[in] j JSON value to serialize
@return CBOR serialization as byte vector
@@ -13226,35 +13366,35 @@ class basic_json
JSON value type | value/range | MessagePack type | first byte
--------------- | --------------------------------- | ---------------- | ----------
- null | `null` | nil | 0xc0
- boolean | `true` | true | 0xc3
- boolean | `false` | false | 0xc2
- number_integer | -9223372036854775808..-2147483649 | int64 | 0xd3
- number_integer | -2147483648..-32769 | int32 | 0xd2
- number_integer | -32768..-129 | int16 | 0xd1
- number_integer | -128..-33 | int8 | 0xd0
- number_integer | -32..-1 | negative fixint | 0xe0..0xff
- number_integer | 0..127 | positive fixint | 0x00..0x7f
- number_integer | 128..255 | uint 8 | 0xcc
- number_integer | 256..65535 | uint 16 | 0xcd
- number_integer | 65536..4294967295 | uint 32 | 0xce
- number_integer | 4294967296..18446744073709551615 | uint 64 | 0xcf
- number_unsigned | 0..127 | positive fixint | 0x00..0x7f
- number_unsigned | 128..255 | uint 8 | 0xcc
- number_unsigned | 256..65535 | uint 16 | 0xcd
- number_unsigned | 65536..4294967295 | uint 32 | 0xce
- number_unsigned | 4294967296..18446744073709551615 | uint 64 | 0xcf
- number_float | *any value* | float 64 | 0xcb
- string | *length*: 0..31 | fixstr | 0xa0..0xbf
- string | *length*: 32..255 | str 8 | 0xd9
- string | *length*: 256..65535 | str 16 | 0xda
- string | *length*: 65536..4294967295 | str 32 | 0xdb
- array | *size*: 0..15 | fixarray | 0x90..0x9f
- array | *size*: 16..65535 | array 16 | 0xdc
- array | *size*: 65536..4294967295 | array 32 | 0xdd
- object | *size*: 0..15 | fix map | 0x80..0x8f
- object | *size*: 16..65535 | map 16 | 0xde
- object | *size*: 65536..4294967295 | map 32 | 0xdf
+ null | `null` | nil | 0xC0
+ boolean | `true` | true | 0xC3
+ boolean | `false` | false | 0xC2
+ number_integer | -9223372036854775808..-2147483649 | int64 | 0xD3
+ number_integer | -2147483648..-32769 | int32 | 0xD2
+ number_integer | -32768..-129 | int16 | 0xD1
+ number_integer | -128..-33 | int8 | 0xD0
+ number_integer | -32..-1 | negative fixint | 0xE0..0xFF
+ number_integer | 0..127 | positive fixint | 0x00..0x7F
+ number_integer | 128..255 | uint 8 | 0xCC
+ number_integer | 256..65535 | uint 16 | 0xCD
+ number_integer | 65536..4294967295 | uint 32 | 0xCE
+ number_integer | 4294967296..18446744073709551615 | uint 64 | 0xCF
+ number_unsigned | 0..127 | positive fixint | 0x00..0x7F
+ number_unsigned | 128..255 | uint 8 | 0xCC
+ number_unsigned | 256..65535 | uint 16 | 0xCD
+ number_unsigned | 65536..4294967295 | uint 32 | 0xCE
+ number_unsigned | 4294967296..18446744073709551615 | uint 64 | 0xCF
+ number_float | *any value* | float 64 | 0xCB
+ string | *length*: 0..31 | fixstr | 0xA0..0xBF
+ string | *length*: 32..255 | str 8 | 0xD9
+ string | *length*: 256..65535 | str 16 | 0xDA
+ string | *length*: 65536..4294967295 | str 32 | 0xDB
+ array | *size*: 0..15 | fixarray | 0x90..0x9F
+ array | *size*: 16..65535 | array 16 | 0xDC
+ array | *size*: 65536..4294967295 | array 32 | 0xDD
+ object | *size*: 0..15 | fixmap | 0x80..0x8F
+ object | *size*: 16..65535 | map 16 | 0xDE
+ object | *size*: 65536..4294967295 | map 32 | 0xDF
@note The mapping is **complete** in the sense that any JSON value type
can be converted to a MessagePack value.
@@ -13265,10 +13405,10 @@ class basic_json
- objects with more than 4294967295 elements
@note The following MessagePack types are not used in the conversion:
- - bin 8 - bin 32 (0xc4..0xc6)
- - ext 8 - ext 32 (0xc7..0xc9)
- - float 32 (0xca)
- - fixext 1 - fixext 16 (0xd4..0xd8)
+ - bin 8 - bin 32 (0xC4..0xC6)
+ - ext 8 - ext 32 (0xC7..0xC9)
+ - float 32 (0xCA)
+ - fixext 1 - fixext 16 (0xD4..0xD8)
@note Any MessagePack output created @ref to_msgpack can be successfully
parsed by @ref from_msgpack.
@@ -13322,51 +13462,51 @@ class basic_json
Integer | number_unsigned | 0x00..0x17
Unsigned integer | number_unsigned | 0x18
Unsigned integer | number_unsigned | 0x19
- Unsigned integer | number_unsigned | 0x1a
- Unsigned integer | number_unsigned | 0x1b
+ Unsigned integer | number_unsigned | 0x1A
+ Unsigned integer | number_unsigned | 0x1B
Negative integer | number_integer | 0x20..0x37
Negative integer | number_integer | 0x38
Negative integer | number_integer | 0x39
- Negative integer | number_integer | 0x3a
- Negative integer | number_integer | 0x3b
+ Negative integer | number_integer | 0x3A
+ Negative integer | number_integer | 0x3B
Negative integer | number_integer | 0x40..0x57
UTF-8 string | string | 0x60..0x77
UTF-8 string | string | 0x78
UTF-8 string | string | 0x79
- UTF-8 string | string | 0x7a
- UTF-8 string | string | 0x7b
- UTF-8 string | string | 0x7f
+ UTF-8 string | string | 0x7A
+ UTF-8 string | string | 0x7B
+ UTF-8 string | string | 0x7F
array | array | 0x80..0x97
array | array | 0x98
array | array | 0x99
- array | array | 0x9a
- array | array | 0x9b
- array | array | 0x9f
- map | object | 0xa0..0xb7
- map | object | 0xb8
- map | object | 0xb9
- map | object | 0xba
- map | object | 0xbb
- map | object | 0xbf
- False | `false` | 0xf4
- True | `true` | 0xf5
- Nill | `null` | 0xf6
- Half-Precision Float | number_float | 0xf9
- Single-Precision Float | number_float | 0xfa
- Double-Precision Float | number_float | 0xfb
+ array | array | 0x9A
+ array | array | 0x9B
+ array | array | 0x9F
+ map | object | 0xA0..0xB7
+ map | object | 0xB8
+ map | object | 0xB9
+ map | object | 0xBA
+ map | object | 0xBB
+ map | object | 0xBF
+ False | `false` | 0xF4
+ True | `true` | 0xF5
+ Null | `null` | 0xF6
+ Half-Precision Float | number_float | 0xF9
+ Single-Precision Float | number_float | 0xFA
+ Double-Precision Float | number_float | 0xFB
@warning The mapping is **incomplete** in the sense that not all CBOR
types can be converted to a JSON value. The following CBOR types
are not supported and will yield parse errors (parse_error.112):
- - byte strings (0x40..0x5f)
- - date/time (0xc0..0xc1)
- - bignum (0xc2..0xc3)
- - decimal fraction (0xc4)
- - bigfloat (0xc5)
- - tagged items (0xc6..0xd4, 0xd8..0xdb)
- - expected conversions (0xd5..0xd7)
- - simple values (0xe0..0xf3, 0xf8)
- - undefined (0xf7)
+ - byte strings (0x40..0x5F)
+ - date/time (0xC0..0xC1)
+ - bignum (0xC2..0xC3)
+ - decimal fraction (0xC4)
+ - bigfloat (0xC5)
+ - tagged items (0xC6..0xD4, 0xD8..0xDB)
+ - expected conversions (0xD5..0xD7)
+ - simple values (0xE0..0xF3, 0xF8)
+ - undefined (0xF7)
@warning CBOR allows map keys of any type, whereas JSON only allows
strings as keys in object values. Therefore, CBOR maps with keys
@@ -13426,38 +13566,38 @@ class basic_json
MessagePack type | JSON value type | first byte
---------------- | --------------- | ----------
- positive fixint | number_unsigned | 0x00..0x7f
- fixmap | object | 0x80..0x8f
- fixarray | array | 0x90..0x9f
- fixstr | string | 0xa0..0xbf
- nil | `null` | 0xc0
- false | `false` | 0xc2
- true | `true` | 0xc3
- float 32 | number_float | 0xca
- float 64 | number_float | 0xcb
- uint 8 | number_unsigned | 0xcc
- uint 16 | number_unsigned | 0xcd
- uint 32 | number_unsigned | 0xce
- uint 64 | number_unsigned | 0xcf
- int 8 | number_integer | 0xd0
- int 16 | number_integer | 0xd1
- int 32 | number_integer | 0xd2
- int 64 | number_integer | 0xd3
- str 8 | string | 0xd9
- str 16 | string | 0xda
- str 32 | string | 0xdb
- array 16 | array | 0xdc
- array 32 | array | 0xdd
- map 16 | object | 0xde
- map 32 | object | 0xdf
- negative fixint | number_integer | 0xe0-0xff
+ positive fixint | number_unsigned | 0x00..0x7F
+ fixmap | object | 0x80..0x8F
+ fixarray | array | 0x90..0x9F
+ fixstr | string | 0xA0..0xBF
+ nil | `null` | 0xC0
+ false | `false` | 0xC2
+ true | `true` | 0xC3
+ float 32 | number_float | 0xCA
+ float 64 | number_float | 0xCB
+ uint 8 | number_unsigned | 0xCC
+ uint 16 | number_unsigned | 0xCD
+ uint 32 | number_unsigned | 0xCE
+ uint 64 | number_unsigned | 0xCF
+ int 8 | number_integer | 0xD0
+ int 16 | number_integer | 0xD1
+ int 32 | number_integer | 0xD2
+ int 64 | number_integer | 0xD3
+ str 8 | string | 0xD9
+ str 16 | string | 0xDA
+ str 32 | string | 0xDB
+ array 16 | array | 0xDC
+ array 32 | array | 0xDD
+ map 16 | object | 0xDE
+ map 32 | object | 0xDF
+ negative fixint | number_integer | 0xE0..0xFF
@warning The mapping is **incomplete** in the sense that not all
MessagePack types can be converted to a JSON value. The following
MessagePack types are not supported and will yield parse errors:
- - bin 8 - bin 32 (0xc4..0xc6)
- - ext 8 - ext 32 (0xc7..0xc9)
- - fixext 1 - fixext 16 (0xd4..0xd8)
+ - bin 8 - bin 32 (0xC4..0xC6)
+ - ext 8 - ext 32 (0xC7..0xC9)
+ - fixext 1 - fixext 16 (0xD4..0xD8)
@note Any MessagePack output created @ref to_msgpack can be successfully
parsed by @ref from_msgpack.
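Round-trip sketch for the two binary formats documented above (assuming <nlohmann/json.hpp>):

    #include <nlohmann/json.hpp>
    #include <cassert>
    #include <cstdint>
    #include <vector>

    int main()
    {
        const nlohmann::json j = {{"pi", 3.141}, {"happy", true}, {"list", {1, 2, 3}}};

        const std::vector<std::uint8_t> cbor    = nlohmann::json::to_cbor(j);
        const std::vector<std::uint8_t> msgpack = nlohmann::json::to_msgpack(j);

        assert(nlohmann::json::from_cbor(cbor) == j);        // lossless for this value
        assert(nlohmann::json::from_msgpack(msgpack) == j);
    }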
@@ -13601,6 +13741,9 @@ class basic_json
pointer @a ptr. As `at` provides checked access (and no elements are
implicitly inserted), the index '-' is always invalid. See example below.
+ @throw out_of_range.403 if the JSON pointer describes a key of an object
+ which cannot be found. See example below.
+
@throw out_of_range.404 if the JSON pointer @a ptr can not be resolved.
See example below.
@@ -13641,6 +13784,9 @@ class basic_json
pointer @a ptr. As `at` provides checked access (and no elements are
implicitly inserted), the index '-' is always invalid. See example below.
+ @throw out_of_range.403 if the JSON pointer describes a key of an object
+ which cannot be found. See example below.
+
@throw out_of_range.404 if the JSON pointer @a ptr can not be resolved.
See example below.
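Short sketch of the newly documented failure mode (assuming <nlohmann/json.hpp>): a pointer naming a missing object key is reported as out_of_range.403.

    #include <nlohmann/json.hpp>
    #include <iostream>

    int main()
    {
        nlohmann::json j = {{"foo", {{"bar", 1}}}};
        try {
            j.at("/foo/baz"_json_pointer);  // key "baz" does not exist
        } catch (nlohmann::json::out_of_range& e) {
            std::cout << e.what() << '\n';  // error 403: key 'baz' not found
        }
    }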
@@ -13856,7 +14002,7 @@ class basic_json
}
else
{
- const auto idx = std::stoi(last_path);
+ const auto idx = json_pointer::array_index(last_path);
if (JSON_UNLIKELY(static_cast<size_type>(idx) > parent.size()))
{
// avoid undefined behavior
@@ -13904,7 +14050,7 @@ class basic_json
else if (parent.is_array())
{
// note erase performs range check
- parent.erase(static_cast<size_type>(std::stoi(last_path)));
+ parent.erase(static_cast<size_type>(json_pointer::array_index(last_path)));
}
};
@@ -13999,7 +14145,12 @@ class basic_json
const json_pointer from_ptr(from_path);
// the "from" location must exist - use at()
- result[ptr] = result.at(from_ptr);
+ basic_json v = result.at(from_ptr);
+
+ // The copy is functionally identical to an "add"
+ // operation at the target location using the value
+ // specified in the "from" member.
+ operation_add(ptr, v);
break;
}
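Sketch of the documented "copy" semantics (assuming <nlohmann/json.hpp>): the value read from "from" is inserted with the same logic as an "add" at "path", so "-" appends to an array.

    #include <nlohmann/json.hpp>
    #include <cassert>

    int main()
    {
        const nlohmann::json doc = {{"a", {1, 2, 3}}};
        const nlohmann::json p = nlohmann::json::parse(
            R"([{ "op": "copy", "from": "/a/0", "path": "/a/-" }])");

        const nlohmann::json expected = {{"a", {1, 2, 3, 1}}};
        assert(doc.patch(p) == expected);  // the first element is copied to the end
    }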
@@ -14141,7 +14292,7 @@ class basic_json
case value_t::object:
{
// first pass: traverse this object's elements
- for (auto it = source.begin(); it != source.end(); ++it)
+ for (auto it = source.cbegin(); it != source.cend(); ++it)
{
// escape the key name to be used in a JSON patch
const auto key = json_pointer::escape(it.key());
@@ -14163,7 +14314,7 @@ class basic_json
}
// second pass: traverse other object's elements
- for (auto it = target.begin(); it != target.end(); ++it)
+ for (auto it = target.cbegin(); it != target.cend(); ++it)
{
if (source.find(it.key()) == source.end())
{
@@ -14256,7 +14407,7 @@ json_pointer::get_and_create(NLOHMANN_BASIC_JSON_TPL& j) const
// create an entry in the array
JSON_TRY
{
- result = &result->operator[](static_cast<size_type>(std::stoi(reference_token)));
+ result = &result->operator[](static_cast<size_type>(array_index(reference_token)));
}
JSON_CATCH(std::invalid_argument&)
{
@@ -14333,7 +14484,7 @@ json_pointer::get_unchecked(NLOHMANN_BASIC_JSON_TPL* ptr) const
JSON_TRY
{
ptr = &ptr->operator[](
- static_cast<size_type>(std::stoi(reference_token)));
+ static_cast<size_type>(array_index(reference_token)));
}
JSON_CATCH(std::invalid_argument&)
{
@@ -14388,7 +14539,7 @@ json_pointer::get_checked(NLOHMANN_BASIC_JSON_TPL* ptr) const
// note: at performs range check
JSON_TRY
{
- ptr = &ptr->at(static_cast<size_type>(std::stoi(reference_token)));
+ ptr = &ptr->at(static_cast<size_type>(array_index(reference_token)));
}
JSON_CATCH(std::invalid_argument&)
{
@@ -14443,7 +14594,7 @@ json_pointer::get_unchecked(const NLOHMANN_BASIC_JSON_TPL* ptr) const
JSON_TRY
{
ptr = &ptr->operator[](
- static_cast<size_type>(std::stoi(reference_token)));
+ static_cast<size_type>(array_index(reference_token)));
}
JSON_CATCH(std::invalid_argument&)
{
@@ -14497,7 +14648,7 @@ json_pointer::get_checked(const NLOHMANN_BASIC_JSON_TPL* ptr) const
// note: at performs range check
JSON_TRY
{
- ptr = &ptr->at(static_cast<size_type>(std::stoi(reference_token)));
+ ptr = &ptr->at(static_cast<size_type>(array_index(reference_token)));
}
JSON_CATCH(std::invalid_argument&)
{
diff --git a/tests/build-remote.sh b/tests/build-remote.sh
index cf3bb4633..9bca0f4a3 100644
--- a/tests/build-remote.sh
+++ b/tests/build-remote.sh
@@ -2,7 +2,7 @@ source common.sh
clearStore
-if [[ $(uname) != Linux ]]; then exit; fi
+if ! canUseSandbox; then exit; fi
if [[ ! $SHELL =~ /nix/store ]]; then exit; fi
chmod -R u+w $TEST_ROOT/store0 || true
diff --git a/tests/check.nix b/tests/check.nix
new file mode 100644
index 000000000..08aac2fb0
--- /dev/null
+++ b/tests/check.nix
@@ -0,0 +1,17 @@
+with import ./config.nix;
+
+{
+ nondeterministic = mkDerivation {
+ name = "nondeterministic";
+ buildCommand =
+ ''
+ mkdir $out
+ date +%s.%N > $out/date
+ '';
+ };
+
+ fetchurl = import <nix/fetchurl.nix> {
+ url = "file://" + toString ./lang/eval-okay-xml.exp.xml;
+ sha256 = "0kg4sla7ihm8ijr8cb3117fhl99zrc2bwy1jrngsfmkh8bav4m0v";
+ };
+}
diff --git a/tests/check.sh b/tests/check.sh
new file mode 100644
index 000000000..b05e40ffb
--- /dev/null
+++ b/tests/check.sh
@@ -0,0 +1,32 @@
+source common.sh
+
+clearStore
+
+nix-build dependencies.nix --no-out-link
+nix-build dependencies.nix --no-out-link --check
+
+nix-build check.nix -A nondeterministic --no-out-link
+(! nix-build check.nix -A nondeterministic --no-out-link --check 2> $TEST_ROOT/log)
+grep 'may not be deterministic' $TEST_ROOT/log
+
+clearStore
+
+nix-build dependencies.nix --no-out-link --repeat 3
+
+(! nix-build check.nix -A nondeterministic --no-out-link --repeat 1 2> $TEST_ROOT/log)
+grep 'differs from previous round' $TEST_ROOT/log
+
+path=$(nix-build check.nix -A fetchurl --no-out-link --hashed-mirrors '')
+
+chmod +w $path
+echo foo > $path
+chmod -w $path
+
+nix-build check.nix -A fetchurl --no-out-link --check --hashed-mirrors ''
+
+# Note: "check" doesn't repair anything, it just compares to the hash stored in the database.
+[[ $(cat $path) = foo ]]
+
+nix-build check.nix -A fetchurl --no-out-link --repair --hashed-mirrors ''
+
+[[ $(cat $path) != foo ]]
diff --git a/tests/common.sh.in b/tests/common.sh.in
index 83643d8b0..195205988 100644
--- a/tests/common.sh.in
+++ b/tests/common.sh.in
@@ -11,7 +11,6 @@ export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
export NIX_STATE_DIR=$TEST_ROOT/var/nix
export NIX_CONF_DIR=$TEST_ROOT/etc
-export NIX_MANIFESTS_DIR=$TEST_ROOT/var/nix/manifests
export _NIX_TEST_SHARED=$TEST_ROOT/shared
if [[ -n $NIX_STORE ]]; then
export _NIX_TEST_NO_SANDBOX=1
@@ -87,6 +86,24 @@ killDaemon() {
trap "" EXIT
}
+canUseSandbox() {
+ if [[ $(uname) != Linux ]]; then return 1; fi
+
+ if [ ! -L /proc/self/ns/user ]; then
+ echo "Kernel doesn't support user namespaces, skipping this test..."
+ return 1
+ fi
+
+ if [ -e /proc/sys/kernel/unprivileged_userns_clone ]; then
+ if [ "$(cat /proc/sys/kernel/unprivileged_userns_clone)" != 1 ]; then
+ echo "Unprivileged user namespaces disabled by sysctl, skipping this test..."
+ return 1
+ fi
+ fi
+
+ return 0
+}
+
fail() {
echo "$1"
exit 1
diff --git a/tests/fetchGit.sh b/tests/fetchGit.sh
index 65d673c08..530ac7bb8 100644
--- a/tests/fetchGit.sh
+++ b/tests/fetchGit.sh
@@ -29,10 +29,17 @@ rev2=$(git -C $repo rev-parse HEAD)
path=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath")
[[ $(cat $path/hello) = world ]]
+# In pure eval mode, fetchGit without a revision should fail.
+[[ $(nix eval --raw "(builtins.readFile (fetchGit file://$repo + \"/hello\"))") = world ]]
+(! nix eval --pure-eval --raw "(builtins.readFile (fetchGit file://$repo + \"/hello\"))")
+
# Fetch using an explicit revision hash.
path2=$(nix eval --raw "(builtins.fetchGit { url = file://$repo; rev = \"$rev2\"; }).outPath")
[[ $path = $path2 ]]
+# In pure eval mode, fetchGit with a revision should succeed.
+[[ $(nix eval --pure-eval --raw "(builtins.readFile (fetchGit { url = file://$repo; rev = \"$rev2\"; } + \"/hello\"))") = world ]]
+
# Fetch again. This should be cached.
mv $repo ${repo}-tmp
path2=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath")
@@ -119,3 +126,16 @@ path4=$(nix eval --raw "(builtins.fetchGit $repo).outPath")
# Confirm same as 'dev' branch
path5=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath")
[[ $path3 = $path5 ]]
+
+
+# Nuke the cache
+rm -rf $TEST_HOME/.cache/nix/git
+
+# Try again, but without 'git' on PATH
+NIX=$(command -v nix)
+# This should fail
+(! PATH= $NIX eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath" )
+
+# Try again, with 'git' available. This should work.
+path5=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath")
+[[ $path3 = $path5 ]]
diff --git a/tests/fetchMercurial.sh b/tests/fetchMercurial.sh
index 271350ecd..4088dbd39 100644
--- a/tests/fetchMercurial.sh
+++ b/tests/fetchMercurial.sh
@@ -29,10 +29,17 @@ rev2=$(hg log --cwd $repo -r tip --template '{node}')
path=$(nix eval --raw "(builtins.fetchMercurial file://$repo).outPath")
[[ $(cat $path/hello) = world ]]
+# In pure eval mode, fetchMercurial without a revision should fail.
+[[ $(nix eval --raw "(builtins.readFile (fetchMercurial file://$repo + \"/hello\"))") = world ]]
+(! nix eval --pure-eval --raw "(builtins.readFile (fetchMercurial file://$repo + \"/hello\"))")
+
# Fetch using an explicit revision hash.
path2=$(nix eval --raw "(builtins.fetchMercurial { url = file://$repo; rev = \"$rev2\"; }).outPath")
[[ $path = $path2 ]]
+# In pure eval mode, fetchMercurial with a revision should succeed.
+[[ $(nix eval --pure-eval --raw "(builtins.readFile (fetchMercurial { url = file://$repo; rev = \"$rev2\"; } + \"/hello\"))") = world ]]
+
# Fetch again. This should be cached.
mv $repo ${repo}-tmp
path2=$(nix eval --raw "(builtins.fetchMercurial file://$repo).outPath")
diff --git a/tests/fixed.sh b/tests/fixed.sh
index cac3f0be9..8f51403a7 100644
--- a/tests/fixed.sh
+++ b/tests/fixed.sh
@@ -5,15 +5,22 @@ clearStore
export IMPURE_VAR1=foo
export IMPURE_VAR2=bar
+path=$(nix-store -q $(nix-instantiate fixed.nix -A good.0))
+
+echo 'testing bad...'
+nix-build fixed.nix -A bad --no-out-link && fail "should fail"
+
+# Building with the bad hash should produce the "good" output path as
+# a side-effect.
+[[ -e $path ]]
+nix path-info --json $path | grep fixed:md5:2qk15sxzzjlnpjk9brn7j8ppcd
+
echo 'testing good...'
nix-build fixed.nix -A good --no-out-link
echo 'testing good2...'
nix-build fixed.nix -A good2 --no-out-link
-echo 'testing bad...'
-nix-build fixed.nix -A bad --no-out-link && fail "should fail"
-
echo 'testing reallyBad...'
nix-instantiate fixed.nix -A reallyBad && fail "should fail"
diff --git a/tests/lang/data b/tests/lang/data
new file mode 100644
index 000000000..257cc5642
--- /dev/null
+++ b/tests/lang/data
@@ -0,0 +1 @@
+foo
diff --git a/tests/lang/eval-okay-path.nix b/tests/lang/eval-okay-path.nix
new file mode 100644
index 000000000..e67168cf3
--- /dev/null
+++ b/tests/lang/eval-okay-path.nix
@@ -0,0 +1,7 @@
+builtins.path
+ { path = ./.;
+ filter = path: _: baseNameOf path == "data";
+ recursive = true;
+ sha256 = "1yhm3gwvg5a41yylymgblsclk95fs6jy72w0wv925mmidlhcq4sw";
+ name = "output";
+ }
diff --git a/tests/linux-sandbox.sh b/tests/linux-sandbox.sh
index 4a686bb59..acfd46c54 100644
--- a/tests/linux-sandbox.sh
+++ b/tests/linux-sandbox.sh
@@ -2,7 +2,7 @@ source common.sh
clearStore
-if [[ $(uname) != Linux ]]; then exit; fi
+if ! canUseSandbox; then exit; fi
# Note: we need to bind-mount $SHELL into the chroot. Currently we
# only support the case where $SHELL is in the Nix store, because
diff --git a/tests/local.mk b/tests/local.mk
index 83154228e..51bc09dd4 100644
--- a/tests/local.mk
+++ b/tests/local.mk
@@ -20,7 +20,10 @@ nix_tests = \
fetchMercurial.sh \
signing.sh \
run.sh \
- brotli.sh
+ brotli.sh \
+ pure-eval.sh \
+ check.sh \
+ plugins.sh
# parallel.sh
install-tests += $(foreach x, $(nix_tests), tests/$(x))
@@ -29,4 +32,4 @@ tests-environment = NIX_REMOTE= $(bash) -e
clean-files += $(d)/common.sh
-installcheck: $(d)/common.sh
+installcheck: $(d)/common.sh $(d)/plugins/plugintest.so
diff --git a/tests/misc.sh b/tests/misc.sh
index 6d0ab3adc..eda016416 100644
--- a/tests/misc.sh
+++ b/tests/misc.sh
@@ -16,4 +16,4 @@ nix-env --foo 2>&1 | grep "no operation"
nix-env -q --foo 2>&1 | grep "unknown flag"
# Eval Errors.
-nix-instantiate --eval -E 'let a = {} // a; in a.foo' 2>&1 | grep "infinite recursion encountered, at (string):1:15$"
+nix-instantiate --eval -E 'let a = {} // a; in a.foo' 2>&1 | grep "infinite recursion encountered, at .*(string).*:1:15$"
diff --git a/tests/nix-copy-closure.nix b/tests/nix-copy-closure.nix
index 0bf5b42d8..0dc147fb3 100644
--- a/tests/nix-copy-closure.nix
+++ b/tests/nix-copy-closure.nix
@@ -2,7 +2,7 @@
{ nixpkgs, system, nix }:
-with import (nixpkgs + /nixos/lib/testing.nix) { inherit system; };
+with import (nixpkgs + "/nixos/lib/testing.nix") { inherit system; };
makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; in {
@@ -29,10 +29,10 @@ makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; in {
startAll;
# Create an SSH key on the client.
- my $key = `${pkgs.openssh}/bin/ssh-keygen -t dsa -f key -N ""`;
+ my $key = `${pkgs.openssh}/bin/ssh-keygen -t ed25519 -f key -N ""`;
$client->succeed("mkdir -m 700 /root/.ssh");
- $client->copyFileFromHost("key", "/root/.ssh/id_dsa");
- $client->succeed("chmod 600 /root/.ssh/id_dsa");
+ $client->copyFileFromHost("key", "/root/.ssh/id_ed25519");
+ $client->succeed("chmod 600 /root/.ssh/id_ed25519");
# Install the SSH key on the server.
$server->succeed("mkdir -m 700 /root/.ssh");
diff --git a/tests/plugins.sh b/tests/plugins.sh
new file mode 100644
index 000000000..23caf04f3
--- /dev/null
+++ b/tests/plugins.sh
@@ -0,0 +1,7 @@
+source common.sh
+
+set -o pipefail
+
+res=$(nix eval '(builtins.anotherNull)' --option plugin-files $PWD/plugins/plugintest.so)
+
+[ "$res"x = "nullx" ]
diff --git a/tests/plugins/local.mk b/tests/plugins/local.mk
new file mode 100644
index 000000000..a5f19b087
--- /dev/null
+++ b/tests/plugins/local.mk
@@ -0,0 +1,9 @@
+libraries += plugintest
+
+plugintest_DIR := $(d)
+
+plugintest_SOURCES := $(d)/plugintest.cc
+
+plugintest_ALLOW_UNDEFINED := 1
+
+plugintest_EXCLUDE_FROM_LIBRARY_LIST := 1
diff --git a/tests/plugins/plugintest.cc b/tests/plugins/plugintest.cc
new file mode 100644
index 000000000..6b5e6d7cd
--- /dev/null
+++ b/tests/plugins/plugintest.cc
@@ -0,0 +1,10 @@
+#include "primops.hh"
+
+using namespace nix;
+
+static void prim_anotherNull (EvalState & state, const Pos & pos, Value ** args, Value & v)
+{
+ mkNull(v);
+}
+
+static RegisterPrimOp r("anotherNull", 0, prim_anotherNull);
diff --git a/tests/pure-eval.nix b/tests/pure-eval.nix
new file mode 100644
index 000000000..ed25b3d45
--- /dev/null
+++ b/tests/pure-eval.nix
@@ -0,0 +1,3 @@
+{
+ x = 123;
+}
diff --git a/tests/pure-eval.sh b/tests/pure-eval.sh
new file mode 100644
index 000000000..49c856448
--- /dev/null
+++ b/tests/pure-eval.sh
@@ -0,0 +1,18 @@
+source common.sh
+
+clearStore
+
+nix eval --pure-eval '(assert 1 + 2 == 3; true)'
+
+[[ $(nix eval '(builtins.readFile ./pure-eval.sh)') =~ clearStore ]]
+
+(! nix eval --pure-eval '(builtins.readFile ./pure-eval.sh)')
+
+(! nix eval --pure-eval '(builtins.currentTime)')
+(! nix eval --pure-eval '(builtins.currentSystem)')
+
+(! nix-instantiate --pure-eval ./simple.nix)
+
+[[ $(nix eval "((import (builtins.fetchurl { url = file://$(pwd)/pure-eval.nix; })).x)") == 123 ]]
+(! nix eval --pure-eval "((import (builtins.fetchurl { url = file://$(pwd)/pure-eval.nix; })).x)")
+nix eval --pure-eval "((import (builtins.fetchurl { url = file://$(pwd)/pure-eval.nix; sha256 = \"$(nix hash-file pure-eval.nix --type sha256)\"; })).x)"
diff --git a/tests/remote-builds.nix b/tests/remote-builds.nix
index 75704ace2..d7a4b2198 100644
--- a/tests/remote-builds.nix
+++ b/tests/remote-builds.nix
@@ -46,13 +46,13 @@ in
nix.buildMachines =
[ { hostName = "slave1";
sshUser = "root";
- sshKey = "/root/.ssh/id_dsa";
+ sshKey = "/root/.ssh/id_ed25519";
system = "i686-linux";
maxJobs = 1;
}
{ hostName = "slave2";
sshUser = "root";
- sshKey = "/root/.ssh/id_dsa";
+ sshKey = "/root/.ssh/id_ed25519";
system = "i686-linux";
maxJobs = 1;
}
@@ -70,10 +70,10 @@ in
startAll;
# Create an SSH key on the client.
- my $key = `${pkgs.openssh}/bin/ssh-keygen -t dsa -f key -N ""`;
+ my $key = `${pkgs.openssh}/bin/ssh-keygen -t ed25519 -f key -N ""`;
$client->succeed("mkdir -p -m 700 /root/.ssh");
- $client->copyFileFromHost("key", "/root/.ssh/id_dsa");
- $client->succeed("chmod 600 /root/.ssh/id_dsa");
+ $client->copyFileFromHost("key", "/root/.ssh/id_ed25519");
+ $client->succeed("chmod 600 /root/.ssh/id_ed25519");
# Install the SSH key on the slaves.
$client->waitForUnit("network.target");
diff --git a/tests/restricted.nix b/tests/restricted.nix
new file mode 100644
index 000000000..e0ef58402
--- /dev/null
+++ b/tests/restricted.nix
@@ -0,0 +1 @@
+1 + 2
diff --git a/tests/restricted.sh b/tests/restricted.sh
index c063c8693..0605383cc 100644
--- a/tests/restricted.sh
+++ b/tests/restricted.sh
@@ -3,7 +3,8 @@ source common.sh
clearStore
nix-instantiate --restrict-eval --eval -E '1 + 2'
-(! nix-instantiate --restrict-eval ./simple.nix)
+(! nix-instantiate --restrict-eval ./restricted.nix)
+(! nix-instantiate --eval --restrict-eval <(echo '1 + 2'))
nix-instantiate --restrict-eval ./simple.nix -I src=.
nix-instantiate --restrict-eval ./simple.nix -I src1=simple.nix -I src2=config.nix -I src3=./simple.builder.sh
@@ -28,3 +29,12 @@ nix eval --raw "(builtins.fetchurl file://$(pwd)/restricted.sh)" --restrict-eval
(! nix eval --raw "(builtins.fetchurl https://github.com/NixOS/patchelf/archive/master.tar.gz)" --restrict-eval)
(! nix eval --raw "(builtins.fetchTarball https://github.com/NixOS/patchelf/archive/master.tar.gz)" --restrict-eval)
(! nix eval --raw "(fetchGit git://github.com/NixOS/patchelf.git)" --restrict-eval)
+
+ln -sfn $(pwd)/restricted.nix $TEST_ROOT/restricted.nix
+[[ $(nix-instantiate --eval $TEST_ROOT/restricted.nix) == 3 ]]
+(! nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix)
+(! nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I $TEST_ROOT)
+(! nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I .)
+nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I $TEST_ROOT -I .
+
+[[ $(nix eval --raw --restrict-eval -I . '(builtins.readFile "${import ./simple.nix}/hello")') == 'Hello World!' ]]
diff --git a/tests/run.sh b/tests/run.sh
index 194e767dd..d1dbfd6bd 100644
--- a/tests/run.sh
+++ b/tests/run.sh
@@ -6,24 +6,23 @@ clearCache
nix run -f run.nix hello -c hello | grep 'Hello World'
nix run -f run.nix hello -c hello NixOS | grep 'Hello NixOS'
-if [[ $(uname) = Linux ]]; then
+if ! canUseSandbox; then exit; fi
- chmod -R u+w $TEST_ROOT/store0 || true
- rm -rf $TEST_ROOT/store0
+chmod -R u+w $TEST_ROOT/store0 || true
+rm -rf $TEST_ROOT/store0
- clearStore
+clearStore
- path=$(nix eval --raw -f run.nix hello)
+path=$(nix eval --raw -f run.nix hello)
- # Note: we need the sandbox paths to ensure that the shell is
- # visible in the sandbox.
- nix run --sandbox-build-dir /build-tmp \
- --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' \
- --store $TEST_ROOT/store0 -f run.nix hello -c hello | grep 'Hello World'
+# Note: we need the sandbox paths to ensure that the shell is
+# visible in the sandbox.
+nix run --sandbox-build-dir /build-tmp \
+ --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' \
+ --store $TEST_ROOT/store0 -f run.nix hello -c hello | grep 'Hello World'
- path2=$(nix run --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' --store $TEST_ROOT/store0 -f run.nix hello -c $SHELL -c 'type -p hello')
+path2=$(nix run --sandbox-paths '/nix? /bin? /lib? /lib64? /usr?' --store $TEST_ROOT/store0 -f run.nix hello -c $SHELL -c 'type -p hello')
- [[ $path/bin/hello = $path2 ]]
+[[ $path/bin/hello = $path2 ]]
- [[ -e $TEST_ROOT/store0/nix/store/$(basename $path)/bin/hello ]]
-fi
+[[ -e $TEST_ROOT/store0/nix/store/$(basename $path)/bin/hello ]]
diff --git a/tests/shell.shebang.sh b/tests/shell.shebang.sh
index c8e55ca9b..f7132043d 100755
--- a/tests/shell.shebang.sh
+++ b/tests/shell.shebang.sh
@@ -1,4 +1,4 @@
#! @ENV_PROG@ nix-shell
-#! nix-shell -I nixpkgs=shell.nix --no-use-substitutes
+#! nix-shell -I nixpkgs=shell.nix --no-substitute
#! nix-shell --pure -i bash -p foo bar
echo "$(foo) $(bar) $@"
diff --git a/tests/user-envs.sh b/tests/user-envs.sh
index c4192fdc5..ba6392311 100644
--- a/tests/user-envs.sh
+++ b/tests/user-envs.sh
@@ -24,6 +24,9 @@ rm -f $HOME/.nix-defexpr
ln -s $(pwd)/user-envs.nix $HOME/.nix-defexpr
nix-env -qa '*' --description | grep -q silly
+# Query the system.
+nix-env -qa '*' --system | grep -q $system
+
# Install "foo-1.0".
nix-env -i foo-1.0
diff --git a/version b/version
index 35d51f33b..415b19fc3 100644
--- a/version
+++ b/version
@@ -1 +1 @@
-1.12 \ No newline at end of file
+2.0 \ No newline at end of file