-rw-r--r--  perl/lib/Nix/Store.xs  12
-rw-r--r--  src/build-remote/build-remote.cc  2
-rw-r--r--  src/libcmd/cmd-profiles.cc  2
-rw-r--r--  src/libexpr/eval-cache.cc  2
-rw-r--r--  src/libexpr/flake/flake.cc  2
-rw-r--r--  src/libexpr/primops.cc  8
-rw-r--r--  src/libexpr/primops/fetchMercurial.cc  4
-rw-r--r--  src/libexpr/primops/fetchTree.cc  14
-rw-r--r--  src/libfetchers/fetch-to-store.cc  4
-rw-r--r--  src/libfetchers/fetchers.cc  10
-rw-r--r--  src/libfetchers/git.cc  16
-rw-r--r--  src/libfetchers/github.cc  14
-rw-r--r--  src/libfetchers/indirect.cc  4
-rw-r--r--  src/libfetchers/mercurial.cc  12
-rw-r--r--  src/libfetchers/tarball.cc  8
-rw-r--r--  src/libstore/binary-cache-store.cc  10
-rw-r--r--  src/libstore/build/local-derivation-goal.cc  16
-rw-r--r--  src/libstore/build/worker.cc  2
-rw-r--r--  src/libstore/builtins/fetchurl.cc  2
-rw-r--r--  src/libstore/content-address.cc  2
-rw-r--r--  src/libstore/daemon.cc  4
-rw-r--r--  src/libstore/derivations.cc  18
-rw-r--r--  src/libstore/downstream-placeholder.cc  8
-rw-r--r--  src/libstore/export-import.cc  6
-rw-r--r--  src/libstore/gc.cc  2
-rw-r--r--  src/libstore/legacy-ssh-store.cc  2
-rw-r--r--  src/libstore/local-store.cc  28
-rw-r--r--  src/libstore/make-content-addressed.cc  4
-rw-r--r--  src/libstore/nar-info-disk-cache.cc  4
-rw-r--r--  src/libstore/nar-info.cc  8
-rw-r--r--  src/libstore/optimise-store.cc  11
-rw-r--r--  src/libstore/path-info.cc  2
-rw-r--r--  src/libstore/path-references.cc  2
-rw-r--r--  src/libstore/path.cc  4
-rw-r--r--  src/libstore/realisation.hh  2
-rw-r--r--  src/libstore/remote-store.cc  8
-rw-r--r--  src/libstore/remote-store.hh  2
-rw-r--r--  src/libstore/serve-protocol.cc  2
-rw-r--r--  src/libstore/store-api.cc  24
-rw-r--r--  src/libstore/store-api.hh  10
-rw-r--r--  src/libstore/worker-protocol.cc  4
-rw-r--r--  src/libutil/args.hh  2
-rw-r--r--  src/libutil/hash.cc  66
-rw-r--r--  src/libutil/hash.hh  8
-rw-r--r--  src/nix-store/nix-store.cc  12
-rw-r--r--  src/nix/add-to-store.cc  4
-rw-r--r--  src/nix/flake.cc  8
-rw-r--r--  src/nix/hash.cc  50
-rw-r--r--  src/nix/path-info.cc  2
-rw-r--r--  src/nix/prefetch.cc  8
-rw-r--r--  src/nix/verify.cc  4
-rw-r--r--  tests/unit/libstore/common-protocol.cc  8
-rw-r--r--  tests/unit/libstore/derivation.cc  4
-rw-r--r--  tests/unit/libstore/serve-protocol.cc  10
-rw-r--r--  tests/unit/libstore/worker-protocol.cc  10
-rw-r--r--  tests/unit/libutil-support/tests/hash.cc  4
-rw-r--r--  tests/unit/libutil/hash.cc  16
57 files changed, 257 insertions, 260 deletions
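
The change below is mechanical: the unscoped hash-type enumerators (htSHA1, htSHA256, ...) are replaced by scoped-enum spellings (HashType::SHA1, HashType::SHA256, ...), and the rendering bases (Base16, Base32, SRI) are now qualified as Base::Base16, Base::Base32, Base::SRI. As a minimal sketch of the resulting calling convention, assuming the post-change declarations in src/libutil/hash.hh (an enum class HashType and an enum class Base); the helper function below is illustrative only and not part of the diff:

    #include "hash.hh"   // assumed to declare HashType, Base, hashString(), Hash::to_string()

    using namespace nix;

    // Hash a string with SHA-256 and render it in base-32, mirroring the pattern
    // used throughout the touched call sites: hashString(HashType::SHA256, s)
    // followed by to_string(Base::Base32, true).
    std::string renderHash(std::string_view s)
    {
        Hash h = hashString(HashType::SHA256, s);
        return h.to_string(Base::Base32, true);  // second argument toggles the algorithm prefix, as at the call sites below
    }
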
diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs
index 4bef020d3..ac79f2777 100644
--- a/perl/lib/Nix/Store.xs
+++ b/perl/lib/Nix/Store.xs
@@ -77,7 +77,7 @@ SV * queryReferences(char * path)
SV * queryPathHash(char * path)
PPCODE:
try {
- auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
+ auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base::Base32, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@@ -103,7 +103,7 @@ SV * queryPathInfo(char * path, int base32)
XPUSHs(&PL_sv_undef);
else
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
- auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
+ auto s = info->narHash.to_string(base32 ? Base::Base32 : Base::Base16, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
mXPUSHi(info->registrationTime);
mXPUSHi(info->narSize);
@@ -205,7 +205,7 @@ SV * hashPath(char * algo, int base32, char * path)
PPCODE:
try {
Hash h = hashPath(parseHashType(algo), path).first;
- auto s = h.to_string(base32 ? Base32 : Base16, false);
+ auto s = h.to_string(base32 ? Base::Base32 : Base::Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@@ -216,7 +216,7 @@ SV * hashFile(char * algo, int base32, char * path)
PPCODE:
try {
Hash h = hashFile(parseHashType(algo), path);
- auto s = h.to_string(base32 ? Base32 : Base16, false);
+ auto s = h.to_string(base32 ? Base::Base32 : Base::Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@@ -227,7 +227,7 @@ SV * hashString(char * algo, int base32, char * s)
PPCODE:
try {
Hash h = hashString(parseHashType(algo), s);
- auto s = h.to_string(base32 ? Base32 : Base16, false);
+ auto s = h.to_string(base32 ? Base::Base32 : Base::Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@@ -238,7 +238,7 @@ SV * convertHash(char * algo, char * s, int toBase32)
PPCODE:
try {
auto h = Hash::parseAny(s, parseHashType(algo));
- auto s = h.to_string(toBase32 ? Base32 : Base16, false);
+ auto s = h.to_string(toBase32 ? Base::Base32 : Base::Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc
index 99bbc62d5..2450e80c2 100644
--- a/src/build-remote/build-remote.cc
+++ b/src/build-remote/build-remote.cc
@@ -42,7 +42,7 @@ static std::string makeLockFilename(const std::string & storeUri) {
// This avoids issues with the escaped URI being very long and causing
// path too long errors, while also avoiding any possibility of collision
// caused by simple truncation.
- auto hash = hashString(HashType::htSHA256, storeUri).to_string(Base::Base32, false);
+ auto hash = hashString(HashType::SHA256, storeUri).to_string(Base::Base32, false);
return escapeUri(storeUri).substr(0, 48) + "-" + hash.substr(0, 16);
}
diff --git a/src/libcmd/cmd-profiles.cc b/src/libcmd/cmd-profiles.cc
index 8a04100f6..cfce4789e 100644
--- a/src/libcmd/cmd-profiles.cc
+++ b/src/libcmd/cmd-profiles.cc
@@ -246,7 +246,7 @@ StorePath ProfileManifest::build(ref<Store> store)
StringSink sink;
sink << dumpPath(tempDir);
- auto narHash = hashString(htSHA256, sink.s);
+ auto narHash = hashString(HashType::SHA256, sink.s);
ValidPathInfo info{
*store,
diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc
index 9d35bab81..83bfd4fb0 100644
--- a/src/libexpr/eval-cache.cc
+++ b/src/libexpr/eval-cache.cc
@@ -50,7 +50,7 @@ struct AttrDb
Path cacheDir = getCacheDir() + "/nix/eval-cache-v5";
createDirs(cacheDir);
- Path dbPath = cacheDir + "/" + fingerprint.to_string(Base16, false) + ".sqlite";
+ Path dbPath = cacheDir + "/" + fingerprint.to_string(Base::Base16, false) + ".sqlite";
state->db = SQLite(dbPath);
state->db.isCache();
diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc
index 7379fc8fb..7f0e5b1e9 100644
--- a/src/libexpr/flake/flake.cc
+++ b/src/libexpr/flake/flake.cc
@@ -937,7 +937,7 @@ Fingerprint LockedFlake::getFingerprint() const
// FIXME: as an optimization, if the flake contains a lock file
// and we haven't changed it, then it's sufficient to use
// flake.sourceInfo.storePath for the fingerprint.
- return hashString(htSHA256,
+ return hashString(HashType::SHA256,
fmt("%s;%s;%d;%d;%s",
flake.sourceInfo->storePath.to_string(),
flake.lockedRef.subdir,
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 561492f86..228e4e1ba 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -1336,7 +1336,7 @@ drvName, Bindings * attrs, Value & v)
state.error<EvalError>("derivation cannot be both content-addressed and impure")
.atPos(v).debugThrow();
- auto ht = parseHashTypeOpt(outputHashAlgo).value_or(htSHA256);
+ auto ht = parseHashTypeOpt(outputHashAlgo).value_or(HashType::SHA256);
auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive);
for (auto & i : outputs) {
@@ -1764,7 +1764,7 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V
auto path = realisePath(state, pos, *args[1]);
- v.mkString(hashString(*ht, path.readFile()).to_string(Base16, false));
+ v.mkString(hashString(*ht, path.readFile()).to_string(Base::Base16, false));
}
static RegisterPrimOp primop_hashFile({
@@ -2346,7 +2346,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
else if (n == "recursive")
method = FileIngestionMethod { state.forceBool(*attr.value, attr.pos, "while evaluating the `recursive` attribute passed to builtins.path") };
else if (n == "sha256")
- expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), htSHA256);
+ expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashType::SHA256);
else
state.error<EvalError>(
"unsupported argument '%1%' to 'addPath'",
@@ -3861,7 +3861,7 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args,
NixStringContext context; // discarded
auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");
- v.mkString(hashString(*ht, s).to_string(Base16, false));
+ v.mkString(hashString(*ht, s).to_string(Base::Base16, false));
}
static RegisterPrimOp primop_hashString({
diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc
index 261722d1b..2031be299 100644
--- a/src/libexpr/primops/fetchMercurial.cc
+++ b/src/libexpr/primops/fetchMercurial.cc
@@ -31,7 +31,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
// be both a revision or a branch/tag name.
auto value = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `rev` attribute passed to builtins.fetchMercurial");
if (std::regex_match(value.begin(), value.end(), revRegex))
- rev = Hash::parseAny(value, htSHA1);
+ rev = Hash::parseAny(value, HashType::SHA1);
else
ref = value;
}
@@ -73,7 +73,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
attrs2.alloc("branch").mkString(*input2.getRef());
// Backward compatibility: set 'rev' to
// 0000000000000000000000000000000000000000 for a dirty tree.
- auto rev2 = input2.getRev().value_or(Hash(htSHA1));
+ auto rev2 = input2.getRev().value_or(Hash(HashType::SHA1));
attrs2.alloc("rev").mkString(rev2.gitRev());
attrs2.alloc("shortRev").mkString(rev2.gitRev().substr(0, 12));
if (auto revCount = input2.getRevCount())
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index e289fe9ca..b0e14a26e 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -32,7 +32,7 @@ void emitTreeAttrs(
auto narHash = input.getNarHash();
assert(narHash);
- attrs.alloc("narHash").mkString(narHash->to_string(SRI, true));
+ attrs.alloc("narHash").mkString(narHash->to_string(Base::SRI, true));
if (input.getType() == "git")
attrs.alloc("submodules").mkBool(
@@ -45,7 +45,7 @@ void emitTreeAttrs(
attrs.alloc("shortRev").mkString(rev->gitShortRev());
} else if (emptyRevFallback) {
// Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
- auto emptyHash = Hash(htSHA1);
+ auto emptyHash = Hash(HashType::SHA1);
attrs.alloc("rev").mkString(emptyHash.gitRev());
attrs.alloc("shortRev").mkString(emptyHash.gitShortRev());
}
@@ -226,7 +226,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
if (n == "url")
url = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the url we should fetch");
else if (n == "sha256")
- expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), htSHA256);
+ expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), HashType::SHA256);
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch");
else
@@ -252,7 +252,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
state.error<EvalError>("in pure evaluation mode, '%s' requires a 'sha256' argument", who).atPos(pos).debugThrow();
// early exit if pinned and already in the store
- if (expectedHash && expectedHash->type == htSHA256) {
+ if (expectedHash && expectedHash->type == HashType::SHA256) {
auto expectedPath = state.store->makeFixedOutputPath(
name,
FixedOutputInfo {
@@ -277,13 +277,13 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
if (expectedHash) {
auto hash = unpack
? state.store->queryPathInfo(storePath)->narHash
- : hashFile(htSHA256, state.store->toRealPath(storePath));
+ : hashFile(HashType::SHA256, state.store->toRealPath(storePath));
if (hash != *expectedHash) {
state.error<EvalError>(
"hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
*url,
- expectedHash->to_string(Base32, true),
- hash.to_string(Base32, true)
+ expectedHash->to_string(Base::Base32, true),
+ hash.to_string(Base::Base32, true)
).withExitStatus(102)
.debugThrow();
}
diff --git a/src/libfetchers/fetch-to-store.cc b/src/libfetchers/fetch-to-store.cc
index f830a4963..b9f828d9a 100644
--- a/src/libfetchers/fetch-to-store.cc
+++ b/src/libfetchers/fetch-to-store.cc
@@ -18,8 +18,8 @@ StorePath fetchToStore(
return
settings.readOnlyMode
- ? store.computeStorePathForPath(name, path.path.abs(), method, htSHA256, filter2).first
- : store.addToStore(name, path.path.abs(), method, htSHA256, filter2, repair);
+ ? store.computeStorePathForPath(name, path.path.abs(), method, HashType::SHA256, filter2).first
+ : store.addToStore(name, path.path.abs(), method, HashType::SHA256, filter2, repair);
}
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
index e09513d8f..c441ffb12 100644
--- a/src/libfetchers/fetchers.cc
+++ b/src/libfetchers/fetchers.cc
@@ -153,12 +153,12 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
};
auto narHash = store->queryPathInfo(tree.storePath)->narHash;
- input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
+ input.attrs.insert_or_assign("narHash", narHash.to_string(Base::SRI, true));
if (auto prevNarHash = getNarHash()) {
if (narHash != *prevNarHash)
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
- to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash.to_string(SRI, true));
+ to_string(), tree.actualPath, prevNarHash->to_string(Base::SRI, true), narHash.to_string(Base::SRI, true));
}
if (auto prevLastModified = getLastModified()) {
@@ -240,8 +240,8 @@ std::string Input::getType() const
std::optional<Hash> Input::getNarHash() const
{
if (auto s = maybeGetStrAttr(attrs, "narHash")) {
- auto hash = s->empty() ? Hash(htSHA256) : Hash::parseSRI(*s);
- if (hash.type != htSHA256)
+ auto hash = s->empty() ? Hash(HashType::SHA256) : Hash::parseSRI(*s);
+ if (hash.type != HashType::SHA256)
throw UsageError("narHash must use SHA-256");
return hash;
}
@@ -264,7 +264,7 @@ std::optional<Hash> Input::getRev() const
hash = Hash::parseAnyPrefixed(*s);
} catch (BadHash &e) {
// Default to sha1 for backwards compatibility with existing flakes
- hash = Hash::parseAny(*s, htSHA1);
+ hash = Hash::parseAny(*s, HashType::SHA1);
}
}
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 8e3165ff6..7d16d3f57 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -49,7 +49,7 @@ bool touchCacheFile(const Path & path, time_t touch_time)
Path getCachePath(std::string_view key)
{
return getCacheDir() + "/nix/gitv3/" +
- hashString(htSHA256, key).to_string(Base32, false);
+ hashString(HashType::SHA256, key).to_string(Base::Base32, false);
}
// Returns the name of the HEAD branch.
@@ -238,7 +238,7 @@ std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, co
return files.count(file);
};
- auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
+ auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, HashType::SHA256, filter);
// FIXME: maybe we should use the timestamp of the last
// modified dirty file?
@@ -437,8 +437,8 @@ struct GitInputScheme : InputScheme
auto checkHashType = [&](const std::optional<Hash> & hash)
{
- if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256))
- throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(Base16, true));
+ if (hash.has_value() && !(hash->type == HashType::SHA1 || hash->type == HashType::SHA256))
+ throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(Base::Base16, true));
};
auto getLockedAttrs = [&]()
@@ -501,7 +501,7 @@ struct GitInputScheme : InputScheme
if (!input.getRev())
input.attrs.insert_or_assign("rev",
- Hash::parseAny(chomp(runProgram("git", true, { "-C", actualUrl, "--git-dir", gitDir, "rev-parse", *input.getRef() })), htSHA1).gitRev());
+ Hash::parseAny(chomp(runProgram("git", true, { "-C", actualUrl, "--git-dir", gitDir, "rev-parse", *input.getRef() })), HashType::SHA1).gitRev());
repoDir = actualUrl;
} else {
@@ -521,7 +521,7 @@ struct GitInputScheme : InputScheme
}
if (auto res = getCache()->lookup(store, unlockedAttrs)) {
- auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
+ auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashType::SHA1);
if (!input.getRev() || input.getRev() == rev2) {
input.attrs.insert_or_assign("rev", rev2.gitRev());
return makeResult(res->first, std::move(res->second));
@@ -599,7 +599,7 @@ struct GitInputScheme : InputScheme
}
if (!input.getRev())
- input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), htSHA1).gitRev());
+ input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), HashType::SHA1).gitRev());
// cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
}
@@ -695,7 +695,7 @@ struct GitInputScheme : InputScheme
unpackTarfile(*proc.getStdout(), tmpDir);
}
- auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
+ auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, HashType::SHA256, filter);
auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", input.getRev()->gitRev() }));
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index b971781ae..32032258a 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -149,7 +149,7 @@ struct GitArchiveInputScheme : InputScheme
auto path = owner + "/" + repo;
assert(!(ref && rev));
if (ref) path += "/" + *ref;
- if (rev) path += "/" + rev->to_string(Base16, false);
+ if (rev) path += "/" + rev->to_string(Base::Base16, false);
return ParsedURL {
.scheme = type(),
.path = path,
@@ -274,7 +274,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
readFile(
store->toRealPath(
downloadFile(store, url, "source", false, headers).storePath)));
- auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1);
+ auto rev = Hash::parseAny(std::string { json["sha"] }, HashType::SHA1);
debug("HEAD revision for '%s' is %s", url, rev.gitRev());
return rev;
}
@@ -295,7 +295,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
: "https://api.%s/repos/%s/%s/tarball/%s";
const auto url = fmt(urlFmt, host, getOwner(input), getRepo(input),
- input.getRev()->to_string(Base16, false));
+ input.getRev()->to_string(Base::Base16, false));
return DownloadUrl { url, headers };
}
@@ -347,7 +347,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
store->toRealPath(
downloadFile(store, url, "source", false, headers).storePath)));
if (json.is_array() && json.size() >= 1 && json[0]["id"] != nullptr) {
- auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1);
+ auto rev = Hash::parseAny(std::string(json[0]["id"]), HashType::SHA1);
debug("HEAD revision for '%s' is %s", url, rev.gitRev());
return rev;
} else if (json.is_array() && json.size() == 0) {
@@ -367,7 +367,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
- input.getRev()->to_string(Base16, false));
+ input.getRev()->to_string(Base::Base16, false));
Headers headers = makeHeadersWithAuthTokens(host);
return DownloadUrl { url, headers };
@@ -444,7 +444,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
if(!id)
throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref);
- auto rev = Hash::parseAny(*id, htSHA1);
+ auto rev = Hash::parseAny(*id, HashType::SHA1);
debug("HEAD revision for '%s' is %s", fmt("%s/%s", base_url, ref), rev.gitRev());
return rev;
}
@@ -454,7 +454,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
auto url = fmt("https://%s/%s/%s/archive/%s.tar.gz",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
- input.getRev()->to_string(Base16, false));
+ input.getRev()->to_string(Base::Base16, false));
Headers headers = makeHeadersWithAuthTokens(host);
return DownloadUrl { url, headers };
diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc
index 8c0176e84..f3912b82f 100644
--- a/src/libfetchers/indirect.cc
+++ b/src/libfetchers/indirect.cc
@@ -22,14 +22,14 @@ struct IndirectInputScheme : InputScheme
if (path.size() == 1) {
} else if (path.size() == 2) {
if (std::regex_match(path[1], revRegex))
- rev = Hash::parseAny(path[1], htSHA1);
+ rev = Hash::parseAny(path[1], HashType::SHA1);
else if (std::regex_match(path[1], refRegex))
ref = path[1];
else
throw BadURL("in flake URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[1]);
} else if (path.size() == 3) {
ref = path[1];
- rev = Hash::parseAny(path[2], htSHA1);
+ rev = Hash::parseAny(path[2], HashType::SHA1);
} else
throw BadURL("GitHub URL '%s' is invalid", url.url);
diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc
index b4150e9df..a1e4c1918 100644
--- a/src/libfetchers/mercurial.cc
+++ b/src/libfetchers/mercurial.cc
@@ -198,7 +198,7 @@ struct MercurialInputScheme : InputScheme
return files.count(file);
};
- auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
+ auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, HashType::SHA256, filter);
return {std::move(storePath), input};
}
@@ -208,8 +208,8 @@ struct MercurialInputScheme : InputScheme
auto checkHashType = [&](const std::optional<Hash> & hash)
{
- if (hash.has_value() && hash->type != htSHA1)
- throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(Base16, true));
+ if (hash.has_value() && hash->type != HashType::SHA1)
+ throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(Base::Base16, true));
};
@@ -248,14 +248,14 @@ struct MercurialInputScheme : InputScheme
});
if (auto res = getCache()->lookup(store, unlockedAttrs)) {
- auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
+ auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashType::SHA1);
if (!input.getRev() || input.getRev() == rev2) {
input.attrs.insert_or_assign("rev", rev2.gitRev());
return makeResult(res->first, std::move(res->second));
}
}
- Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(Base32, false));
+ Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashType::SHA256, actualUrl).to_string(Base::Base32, false));
/* If this is a commit hash that we already have, we don't
have to pull again. */
@@ -289,7 +289,7 @@ struct MercurialInputScheme : InputScheme
runHg({ "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
assert(tokens.size() == 3);
- input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], htSHA1).gitRev());
+ input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], HashType::SHA1).gitRev());
auto revCount = std::stoull(tokens[1]);
input.attrs.insert_or_assign("ref", tokens[2]);
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index b11665805..4c30193e2 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -72,7 +72,7 @@ DownloadFileResult downloadFile(
} else {
StringSink sink;
sink << dumpString(res.data);
- auto hash = hashString(htSHA256, res.data);
+ auto hash = hashString(HashType::SHA256, res.data);
ValidPathInfo info {
*store,
name,
@@ -81,7 +81,7 @@ DownloadFileResult downloadFile(
.hash = hash,
.references = {},
},
- hashString(htSHA256, sink.s),
+ hashString(HashType::SHA256, sink.s),
};
info.narSize = sink.s.size();
auto source = StringSource { sink.s };
@@ -155,7 +155,7 @@ DownloadTarballResult downloadTarball(
throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
auto topDir = tmpDir + "/" + members.begin()->name;
lastModified = lstat(topDir).st_mtime;
- unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair);
+ unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashType::SHA256, defaultPathFilter, NoRepair);
}
Attrs infoAttrs({
@@ -238,7 +238,7 @@ struct CurlInputScheme : InputScheme
// NAR hashes are preferred over file hashes since tar/zip
// files don't have a canonical representation.
if (auto narHash = input.getNarHash())
- url.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
+ url.query.insert_or_assign("narHash", narHash->to_string(Base::SRI, true));
return url;
}
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index 6ab5a0a1c..9b1e22bad 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -128,9 +128,9 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
/* Read the NAR simultaneously into a CompressionSink+FileSink (to
write the compressed NAR to disk), into a HashSink (to get the
NAR hash), and into a NarAccessor (to get the NAR listing). */
- HashSink fileHashSink { htSHA256 };
+ HashSink fileHashSink { HashType::SHA256 };
std::shared_ptr<FSAccessor> narAccessor;
- HashSink narHashSink { htSHA256 };
+ HashSink narHashSink { HashType::SHA256 };
{
FdSink fileSink(fdTemp.get());
TeeSink teeSinkCompressed { fileSink, fileHashSink };
@@ -150,7 +150,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
auto [fileHash, fileSize] = fileHashSink.finish();
narInfo->fileHash = fileHash;
narInfo->fileSize = fileSize;
- narInfo->url = "nar/" + narInfo->fileHash->to_string(Base32, false) + ".nar"
+ narInfo->url = "nar/" + narInfo->fileHash->to_string(Base::Base32, false) + ".nar"
+ (compression == "xz" ? ".xz" :
compression == "bzip2" ? ".bz2" :
compression == "zstd" ? ".zst" :
@@ -288,7 +288,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references)
{
- if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256)
+ if (method != FileIngestionMethod::Recursive || hashAlgo != HashType::SHA256)
unsupported("addToStoreFromDump");
return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info {
@@ -425,7 +425,7 @@ StorePath BinaryCacheStore::addTextToStore(
const StorePathSet & references,
RepairFlag repair)
{
- auto textHash = hashString(htSHA256, s);
+ auto textHash = hashString(HashType::SHA256, s);
auto path = makeTextPath(name, TextInfo { { textHash }, references });
if (!repair && isValidPath(path))
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index db380e07c..87a2ef4bc 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -818,8 +818,8 @@ void LocalDerivationGoal::initTmpDir() {
if (passAsFile.find(i.first) == passAsFile.end()) {
env[i.first] = i.second;
} else {
- auto hash = hashString(htSHA256, i.first);
- std::string fn = ".attr-" + hash.to_string(Base32, false);
+ auto hash = hashString(HashType::SHA256, i.first);
+ std::string fn = ".attr-" + hash.to_string(Base::Base32, false);
Path p = tmpDir + "/" + fn;
writeFile(p, rewriteStrings(i.second, inputRewrites));
chownToBuilder(p);
@@ -2147,7 +2147,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
std::string(newInfo0.path.hashPart())}});
}
- HashResult narHashAndSize = hashPath(htSHA256, actualPath);
+ HashResult narHashAndSize = hashPath(HashType::SHA256, actualPath);
newInfo0.narHash = narHashAndSize.first;
newInfo0.narSize = narHashAndSize.second;
@@ -2167,7 +2167,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
std::string { scratchPath->hashPart() },
std::string { requiredFinalPath.hashPart() });
rewriteOutput(outputRewrites);
- auto narHashAndSize = hashPath(htSHA256, actualPath);
+ auto narHashAndSize = hashPath(HashType::SHA256, actualPath);
ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
newInfo0.narSize = narHashAndSize.second;
auto refs = rewriteRefs();
@@ -2204,8 +2204,8 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
BuildError("hash mismatch in fixed-output derivation '%s':\n likely URL: %s\n specified: %s\n got: %s",
worker.store.printStorePath(drvPath),
guessedUrl,
- wanted.to_string(SRI, true),
- got.to_string(SRI, true)));
+ wanted.to_string(Base::SRI, true),
+ got.to_string(Base::SRI, true)));
}
if (!newInfo0.references.empty())
delayedException = std::make_exception_ptr(
@@ -2608,7 +2608,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(OutputNameView outputName)
{
return worker.store.makeStorePath(
"rewrite:" + std::string(drvPath.to_string()) + ":name:" + std::string(outputName),
- Hash(htSHA256), outputPathName(drv->name, outputName));
+ Hash(HashType::SHA256), outputPathName(drv->name, outputName));
}
@@ -2616,7 +2616,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(const StorePath & path)
{
return worker.store.makeStorePath(
"rewrite:" + std::string(drvPath.to_string()) + ":" + std::string(path.to_string()),
- Hash(htSHA256), path.name());
+ Hash(HashType::SHA256), path.name());
}
diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc
index 84727a377..04f0575b1 100644
--- a/src/libstore/build/worker.cc
+++ b/src/libstore/build/worker.cc
@@ -580,7 +580,7 @@ bool Worker::pathContentsGood(const StorePath & path)
res = false;
else {
HashResult current = hashPath(info->narHash.type, store.printStorePath(path));
- Hash nullHash(htSHA256);
+ Hash nullHash(HashType::SHA256);
res = info->narHash == nullHash || info->narHash == current.first;
}
pathContentsGoodCache.insert_or_assign(path, res);
diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc
index 3a4cdd6e7..4049d1c6c 100644
--- a/src/libstore/builtins/fetchurl.cc
+++ b/src/libstore/builtins/fetchurl.cc
@@ -60,7 +60,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
if (!hashedMirror.ends_with("/")) hashedMirror += '/';
std::optional<HashType> ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
Hash h = newHashAllowEmpty(getAttr("outputHash"), ht);
- fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false));
+ fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base::Base16, false));
return;
} catch (Error & e) {
debug(e.what());
diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc
index 811ddbed7..6aa6d598d 100644
--- a/src/libstore/content-address.cc
+++ b/src/libstore/content-address.cc
@@ -61,7 +61,7 @@ std::string ContentAddress::render() const
+ makeFileIngestionPrefix(method);
},
}, method.raw)
- + this->hash.to_string(Base32, true);
+ + this->hash.to_string(Base::Base32, true);
}
/**
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index b09645f46..aada43253 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -416,7 +416,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
// TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
return std::visit(overloaded {
[&](const TextIngestionMethod &) {
- if (hashType != htSHA256)
+ if (hashType != HashType::SHA256)
throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
name, printHashType(hashType));
// We could stream this by changing Store
@@ -875,7 +875,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
bool repair, dontCheckSigs;
auto path = store->parseStorePath(readString(from));
auto deriver = readString(from);
- auto narHash = Hash::parseAny(readString(from), htSHA256);
+ auto narHash = Hash::parseAny(readString(from), HashType::SHA256);
ValidPathInfo info { path, narHash };
if (deriver != "")
info.deriver = store->parseStorePath(deriver);
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 9d342892d..7f41e6865 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -592,7 +592,7 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs,
[&](const DerivationOutput::CAFixed & dof) {
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first)));
s += ','; printUnquotedString(s, dof.ca.printMethodAlgo());
- s += ','; printUnquotedString(s, dof.ca.hash.to_string(Base16, false));
+ s += ','; printUnquotedString(s, dof.ca.hash.to_string(Base::Base16, false));
},
[&](const DerivationOutput::CAFloating & dof) {
s += ','; printUnquotedString(s, "");
@@ -823,9 +823,9 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
std::map<std::string, Hash> outputHashes;
for (const auto & i : drv.outputs) {
auto & dof = std::get<DerivationOutput::CAFixed>(i.second.raw);
- auto hash = hashString(htSHA256, "fixed:out:"
+ auto hash = hashString(HashType::SHA256, "fixed:out:"
+ dof.ca.printMethodAlgo() + ":"
- + dof.ca.hash.to_string(Base16, false) + ":"
+ + dof.ca.hash.to_string(Base::Base16, false) + ":"
+ store.printStorePath(dof.path(store, drv.name, i.first)));
outputHashes.insert_or_assign(i.first, std::move(hash));
}
@@ -870,11 +870,11 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
const auto h = get(res.hashes, outputName);
if (!h)
throw Error("no hash for output '%s' of derivation '%s'", outputName, drv.name);
- inputs2[h->to_string(Base16, false)].value.insert(outputName);
+ inputs2[h->to_string(Base::Base16, false)].value.insert(outputName);
}
}
- auto hash = hashString(htSHA256, drv.unparse(store, maskOutputs, &inputs2));
+ auto hash = hashString(HashType::SHA256, drv.unparse(store, maskOutputs, &inputs2));
std::map<std::string, Hash> outputHashes;
for (const auto & [outputName, _] : drv.outputs) {
@@ -975,7 +975,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
[&](const DerivationOutput::CAFixed & dof) {
out << store.printStorePath(dof.path(store, drv.name, i.first))
<< dof.ca.printMethodAlgo()
- << dof.ca.hash.to_string(Base16, false);
+ << dof.ca.hash.to_string(Base::Base16, false);
},
[&](const DerivationOutput::CAFloating & dof) {
out << ""
@@ -1007,7 +1007,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
std::string hashPlaceholder(const OutputNameView outputName)
{
// FIXME: memoize?
- return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(Base32, false);
+ return "/" + hashString(HashType::SHA256, concatStrings("nix-output:", outputName)).to_string(Base::Base32, false);
}
@@ -1199,7 +1199,7 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const
}
-const Hash impureOutputHash = hashString(htSHA256, "impure");
+const Hash impureOutputHash = hashString(HashType::SHA256, "impure");
nlohmann::json DerivationOutput::toJSON(
const Store & store, std::string_view drvName, OutputNameView outputName) const
@@ -1212,7 +1212,7 @@ nlohmann::json DerivationOutput::toJSON(
[&](const DerivationOutput::CAFixed & dof) {
res["path"] = store.printStorePath(dof.path(store, drvName, outputName));
res["hashAlgo"] = dof.ca.printMethodAlgo();
- res["hash"] = dof.ca.hash.to_string(Base16, false);
+ res["hash"] = dof.ca.hash.to_string(Base::Base16, false);
// FIXME print refs?
},
[&](const DerivationOutput::CAFloating & dof) {
diff --git a/src/libstore/downstream-placeholder.cc b/src/libstore/downstream-placeholder.cc
index 7e3f7548d..d6af5c951 100644
--- a/src/libstore/downstream-placeholder.cc
+++ b/src/libstore/downstream-placeholder.cc
@@ -5,7 +5,7 @@ namespace nix {
std::string DownstreamPlaceholder::render() const
{
- return "/" + hash.to_string(Base32, false);
+ return "/" + hash.to_string(Base::Base32, false);
}
@@ -19,7 +19,7 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownCaOutput(
auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4);
auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName);
return DownstreamPlaceholder {
- hashString(htSHA256, clearText)
+ hashString(HashType::SHA256, clearText)
};
}
@@ -31,10 +31,10 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation(
xpSettings.require(Xp::DynamicDerivations);
auto compressed = compressHash(placeholder.hash, 20);
auto clearText = "nix-computed-output:"
- + compressed.to_string(Base32, false)
+ + compressed.to_string(Base::Base32, false)
+ ":" + std::string { outputName };
return DownstreamPlaceholder {
- hashString(htSHA256, clearText)
+ hashString(HashType::SHA256, clearText)
};
}
diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc
index 8f95919b1..2ccb7f213 100644
--- a/src/libstore/export-import.cc
+++ b/src/libstore/export-import.cc
@@ -30,7 +30,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
{
auto info = queryPathInfo(path);
- HashSink hashSink(htSHA256);
+ HashSink hashSink(HashType::SHA256);
TeeSink teeSink(sink, hashSink);
teeSink << narFromPath(path);
@@ -41,7 +41,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
Hash hash = hashSink.currentHash().first;
if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
throw Error("hash of path '%s' has changed from '%s' to '%s'!",
- printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
+ printStorePath(path), info->narHash.to_string(Base::Base32, true), hash.to_string(Base::Base32, true));
teeSink
<< exportMagic
@@ -77,7 +77,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
auto references = CommonProto::Serialise<StorePathSet>::read(*this,
CommonProto::ReadConn { .from = source });
auto deriver = readString(source);
- auto narHash = hashString(htSHA256, saved.s);
+ auto narHash = hashString(HashType::SHA256, saved.s);
ValidPathInfo info { path, narHash };
if (deriver != "")
diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc
index a088d633f..7b4a12dc9 100644
--- a/src/libstore/gc.cc
+++ b/src/libstore/gc.cc
@@ -42,7 +42,7 @@ static void makeSymlink(const Path & link, const Path & target)
void LocalStore::addIndirectRoot(const Path & path)
{
- std::string hash = hashString(htSHA1, path).to_string(Base32, false);
+ std::string hash = hashString(HashType::SHA1, path).to_string(Base::Base32, false);
Path realRoot = canonPath(fmt("%1%/%2%/auto/%3%", stateDir, gcRootsDir, hash));
makeSymlink(realRoot, path);
}
diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc
index 032611f7c..5650282d6 100644
--- a/src/libstore/legacy-ssh-store.cc
+++ b/src/libstore/legacy-ssh-store.cc
@@ -183,7 +183,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
<< ServeProto::Command::AddToStoreNar
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
- << info.narHash.to_string(Base16, false);
+ << info.narHash.to_string(Base::Base16, false);
conn->to << ServeProto::write(*this, *conn, info.references);
conn->to
<< info.registrationTime
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index f09d1bdab..8ea335551 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -842,7 +842,7 @@ uint64_t LocalStore::addValidPath(State & state,
state.stmts->RegisterValidPath.use()
(printStorePath(info.path))
- (info.narHash.to_string(Base16, true))
+ (info.narHash.to_string(Base::Base16, true))
(info.registrationTime == 0 ? time(0) : info.registrationTime)
(info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
(info.narSize, info.narSize != 0)
@@ -945,7 +945,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
{
state.stmts->UpdatePathInfo.use()
(info.narSize, info.narSize != 0)
- (info.narHash.to_string(Base16, true))
+ (info.narHash.to_string(Base::Base16, true))
(info.ultimate ? 1 : 0, info.ultimate)
(concatStringsSep(" ", info.sigs), !info.sigs.empty())
(renderContentAddress(info.ca), (bool) info.ca)
@@ -1123,7 +1123,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
StorePathSet paths;
for (auto & [_, i] : infos) {
- assert(i.narHash.type == htSHA256);
+ assert(i.narHash.type == HashType::SHA256);
if (isValidPath_(*state, i.path))
updatePathInfo(*state, i);
else
@@ -1241,7 +1241,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
/* While restoring the path from the NAR, compute the hash
of the NAR. */
- HashSink hashSink(htSHA256);
+ HashSink hashSink(HashType::SHA256);
TeeSource wrapperSource { source, hashSink };
@@ -1252,7 +1252,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
if (hashResult.first != info.narHash)
throw Error("hash mismatch importing path '%s';\n specified: %s\n got: %s",
- printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));
+ printStorePath(info.path), info.narHash.to_string(Base::Base32, true), hashResult.first.to_string(Base::Base32, true));
if (hashResult.second != info.narSize)
throw Error("size mismatch importing path '%s';\n specified: %s\n got: %s",
@@ -1268,8 +1268,8 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
if (specified.hash != actualHash.hash) {
throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
printStorePath(info.path),
- specified.hash.to_string(Base32, true),
- actualHash.hash.to_string(Base32, true));
+ specified.hash.to_string(Base::Base32, true),
+ actualHash.hash.to_string(Base::Base32, true));
}
}
@@ -1404,8 +1404,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
/* For computing the nar hash. In recursive SHA-256 mode, this
is the same as the store hash, so no need to do it again. */
auto narHash = std::pair { hash, size };
- if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) {
- HashSink narSink { htSHA256 };
+ if (method != FileIngestionMethod::Recursive || hashAlgo != HashType::SHA256) {
+ HashSink narSink { HashType::SHA256 };
narSink << dumpPath(realPath);
narHash = narSink.finish();
}
@@ -1436,7 +1436,7 @@ StorePath LocalStore::addTextToStore(
std::string_view s,
const StorePathSet & references, RepairFlag repair)
{
- auto hash = hashString(htSHA256, s);
+ auto hash = hashString(HashType::SHA256, s);
auto dstPath = makeTextPath(name, TextInfo {
.hash = hash,
.references = references,
@@ -1462,7 +1462,7 @@ StorePath LocalStore::addTextToStore(
StringSink sink;
sink << dumpString(s);
- auto narHash = hashString(htSHA256, sink.s);
+ auto narHash = hashString(HashType::SHA256, sink.s);
optimisePath(realPath, repair);
@@ -1573,7 +1573,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
for (auto & link : readDirectory(linksDir)) {
printMsg(lvlTalkative, "checking contents of '%s'", link.name);
Path linkPath = linksDir + "/" + link.name;
- std::string hash = hashPath(htSHA256, linkPath).first.to_string(Base32, false);
+ std::string hash = hashPath(HashType::SHA256, linkPath).first.to_string(Base::Base32, false);
if (hash != link.name) {
printError("link '%s' was modified! expected hash '%s', got '%s'",
linkPath, link.name, hash);
@@ -1590,7 +1590,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
printInfo("checking store hashes...");
- Hash nullHash(htSHA256);
+ Hash nullHash(HashType::SHA256);
for (auto & i : validPaths) {
try {
@@ -1606,7 +1606,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
if (info->narHash != nullHash && info->narHash != current.first) {
printError("path '%s' was modified! expected hash '%s', got '%s'",
- printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true));
+ printStorePath(i), info->narHash.to_string(Base::Base32, true), current.first.to_string(Base::Base32, true));
if (repair) repairPath(i); else errors = true;
} else {
diff --git a/src/libstore/make-content-addressed.cc b/src/libstore/make-content-addressed.cc
index 4e6d9dfe5..abb6e9889 100644
--- a/src/libstore/make-content-addressed.cc
+++ b/src/libstore/make-content-addressed.cc
@@ -45,7 +45,7 @@ std::map<StorePath, StorePath> makeContentAddressed(
auto narModuloHash = [&] {
StringSource source{sink.s};
- return computeHashModulo(htSHA256, oldHashPart, source).first;
+ return computeHashModulo(HashType::SHA256, oldHashPart, source).first;
}();
ValidPathInfo info {
@@ -63,7 +63,7 @@ std::map<StorePath, StorePath> makeContentAddressed(
const auto rewritten = rewriteStrings(sink.s, {{oldHashPart, std::string(info.path.hashPart())}});
- info.narHash = hashString(htSHA256, rewritten);
+ info.narHash = hashString(HashType::SHA256, rewritten);
info.narSize = sink.s.size();
StringSource source(rewritten);
diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc
index 169b63819..5c0bb17b9 100644
--- a/src/libstore/nar-info-disk-cache.cc
+++ b/src/libstore/nar-info-disk-cache.cc
@@ -334,9 +334,9 @@ public:
(std::string(info->path.name()))
(narInfo ? narInfo->url : "", narInfo != 0)
(narInfo ? narInfo->compression : "", narInfo != 0)
- (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash)
+ (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base::Base32, true) : "", narInfo && narInfo->fileHash)
(narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
- (info->narHash.to_string(Base32, true))
+ (info->narHash.to_string(Base::Base32, true))
(info->narSize)
(concatStringsSep(" ", info->shortRefs()))
(info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)
diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc
index d17253741..e557b4677 100644
--- a/src/libstore/nar-info.cc
+++ b/src/libstore/nar-info.cc
@@ -104,11 +104,11 @@ std::string NarInfo::to_string(const Store & store) const
res += "URL: " + url + "\n";
assert(compression != "");
res += "Compression: " + compression + "\n";
- assert(fileHash && fileHash->type == htSHA256);
- res += "FileHash: " + fileHash->to_string(Base32, true) + "\n";
+ assert(fileHash && fileHash->type == HashType::SHA256);
+ res += "FileHash: " + fileHash->to_string(Base::Base32, true) + "\n";
res += "FileSize: " + std::to_string(fileSize) + "\n";
- assert(narHash.type == htSHA256);
- res += "NarHash: " + narHash.to_string(Base32, true) + "\n";
+ assert(narHash.type == HashType::SHA256);
+ res += "NarHash: " + narHash.to_string(Base::Base32, true) + "\n";
res += "NarSize: " + std::to_string(narSize) + "\n";
res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";
diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc
index 4f02296c3..9c871b78f 100644
--- a/src/libstore/optimise-store.cc
+++ b/src/libstore/optimise-store.cc
@@ -2,14 +2,11 @@
#include "globals.hh"
#include "signals.hh"
-#include <cstdlib>
#include <cstring>
#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
#include <errno.h>
-#include <stdio.h>
-#include <regex>
namespace nix {
@@ -145,17 +142,17 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
Also note that if `path' is a symlink, then we're hashing the
contents of the symlink (i.e. the result of readlink()), not
the contents of the target (which may not even exist). */
- Hash hash = hashPath(htSHA256, path).first;
- debug("'%1%' has hash '%2%'", path, hash.to_string(Base32, true));
+ Hash hash = hashPath(HashType::SHA256, path).first;
+ debug("'%1%' has hash '%2%'", path, hash.to_string(Base::Base32, true));
/* Check if this is a known hash. */
- Path linkPath = linksDir + "/" + hash.to_string(Base32, false);
+ Path linkPath = linksDir + "/" + hash.to_string(Base::Base32, false);
/* Maybe delete the link, if it has been corrupted. */
if (pathExists(linkPath)) {
auto stLink = lstat(linkPath);
if (st.st_size != stLink.st_size
- || (repair && hash != hashPath(htSHA256, linkPath).first))
+ || (repair && hash != hashPath(HashType::SHA256, linkPath).first))
{
// XXX: Consider overwriting linkPath with our valid version.
warn("removing corrupted link '%s'", linkPath);
diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc
index 336a9fcfc..4dc2823ce 100644
--- a/src/libstore/path-info.cc
+++ b/src/libstore/path-info.cc
@@ -29,7 +29,7 @@ std::string ValidPathInfo::fingerprint(const Store & store) const
store.printStorePath(path));
return
"1;" + store.printStorePath(path) + ";"
- + narHash.to_string(Base32, true) + ";"
+ + narHash.to_string(Base::Base32, true) + ";"
+ std::to_string(narSize) + ";"
+ concatStringsSep(",", store.printStorePathSet(references));
}
diff --git a/src/libstore/path-references.cc b/src/libstore/path-references.cc
index 9ba95f706..d2ed85e52 100644
--- a/src/libstore/path-references.cc
+++ b/src/libstore/path-references.cc
@@ -46,7 +46,7 @@ std::pair<StorePathSet, HashResult> scanForReferences(
const std::string & path,
const StorePathSet & refs)
{
- HashSink hashSink { htSHA256 };
+ HashSink hashSink { HashType::SHA256 };
auto found = scanForReferences(hashSink, path, refs);
auto hash = hashSink.finish();
return std::pair<StorePathSet, HashResult>(found, hash);
diff --git a/src/libstore/path.cc b/src/libstore/path.cc
index d029e986b..d4b5fc0dc 100644
--- a/src/libstore/path.cc
+++ b/src/libstore/path.cc
@@ -46,7 +46,7 @@ StorePath::StorePath(std::string_view _baseName)
}
StorePath::StorePath(const Hash & hash, std::string_view _name)
- : baseName((hash.to_string(Base32, false) + "-").append(std::string(_name)))
+ : baseName((hash.to_string(Base::Base32, false) + "-").append(std::string(_name)))
{
checkName(baseName, name());
}
@@ -60,7 +60,7 @@ StorePath StorePath::dummy("ffffffffffffffffffffffffffffffff-x");
StorePath StorePath::random(std::string_view name)
{
- Hash hash(htSHA1);
+ Hash hash(HashType::SHA1);
randombytes_buf(hash.hash, hash.hashSize);
return StorePath(hash, name);
}
diff --git a/src/libstore/realisation.hh b/src/libstore/realisation.hh
index 559483ce3..f2b228fa0 100644
--- a/src/libstore/realisation.hh
+++ b/src/libstore/realisation.hh
@@ -39,7 +39,7 @@ struct DrvOutput {
std::string to_string() const;
std::string strHash() const
- { return drvHash.to_string(Base16, true); }
+ { return drvHash.to_string(Base::Base16, true); }
static DrvOutput parse(const std::string &);
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index 1b0524316..36223051b 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -397,7 +397,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
std::visit(overloaded {
[&](const TextIngestionMethod & thm) -> void {
- if (hashType != htSHA256)
+ if (hashType != HashType::SHA256)
throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
name, printHashType(hashType));
std::string s = dump.drain();
@@ -409,7 +409,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
conn->to
<< WorkerProto::Op::AddToStore
<< name
- << ((hashType == htSHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
+ << ((hashType == HashType::SHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
<< (fim == FileIngestionMethod::Recursive ? 1 : 0)
<< printHashType(hashType);
@@ -461,7 +461,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
conn->to << WorkerProto::Op::AddToStoreNar
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
- << info.narHash.to_string(Base16, false);
+ << info.narHash.to_string(Base::Base16, false);
conn->to << WorkerProto::write(*this, *conn, info.references);
conn->to << info.registrationTime << info.narSize
<< info.ultimate << info.sigs << renderContentAddress(info.ca)
@@ -533,7 +533,7 @@ StorePath RemoteStore::addTextToStore(
RepairFlag repair)
{
StringSource source(s);
- return addCAToStore(source, name, TextIngestionMethod {}, htSHA256, references, repair)->path;
+ return addCAToStore(source, name, TextIngestionMethod {}, HashType::SHA256, references, repair)->path;
}
void RemoteStore::registerDrvOutput(const Realisation & info)
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index 264869df6..5553d84f0 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -84,7 +84,7 @@ public:
* Add a content-addressable store path. Does not support references. `dump` will be drained.
*/
StorePath addToStoreFromDump(Source & dump, std::string_view name,
- FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override;
+ FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override;
void addToStore(const ValidPathInfo & info, Source & nar,
RepairFlag repair, CheckSigsFlag checkSigs) override;
diff --git a/src/libstore/serve-protocol.cc b/src/libstore/serve-protocol.cc
index 723a494a5..d752bdecd 100644
--- a/src/libstore/serve-protocol.cc
+++ b/src/libstore/serve-protocol.cc
@@ -88,7 +88,7 @@ WireFormatGenerator ServeProto::Serialise<UnkeyedValidPathInfo>::write(const Sto
co_yield info.narSize; // downloadSize, lie a little
co_yield info.narSize;
if (GET_PROTOCOL_MINOR(conn.version) >= 4) {
- co_yield info.narHash.to_string(Base32, true);
+ co_yield info.narHash.to_string(Base::Base32, true);
co_yield renderContentAddress(info.ca);
co_yield info.sigs;
}
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index cb027d311..6d9fec41b 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -156,7 +156,7 @@ StorePath Store::makeStorePath(std::string_view type,
/* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
auto s = std::string(type) + ":" + std::string(hash)
+ ":" + storeDir + ":" + std::string(name);
- auto h = compressHash(hashString(htSHA256, s), 20);
+ auto h = compressHash(hashString(HashType::SHA256, s), 20);
return StorePath(h, name);
}
@@ -164,7 +164,7 @@ StorePath Store::makeStorePath(std::string_view type,
StorePath Store::makeStorePath(std::string_view type,
const Hash & hash, std::string_view name) const
{
- return makeStorePath(type, hash.to_string(Base16, true), name);
+ return makeStorePath(type, hash.to_string(Base::Base16, true), name);
}
@@ -194,7 +194,7 @@ static std::string makeType(
StorePath Store::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const
{
- if (info.hash.type == htSHA256 && info.method == FileIngestionMethod::Recursive) {
+ if (info.hash.type == HashType::SHA256 && info.method == FileIngestionMethod::Recursive) {
return makeStorePath(makeType(*this, "source", info.references), info.hash, name);
} else {
if (!info.references.empty()) {
@@ -202,10 +202,10 @@ StorePath Store::makeFixedOutputPath(std::string_view name, const FixedOutputInf
name);
}
return makeStorePath("output:out",
- hashString(htSHA256,
+ hashString(HashType::SHA256,
"fixed:out:"
+ makeFileIngestionPrefix(info.method)
- + info.hash.to_string(Base16, true) + ":"),
+ + info.hash.to_string(Base::Base16, true) + ":"),
name);
}
}
@@ -213,7 +213,7 @@ StorePath Store::makeFixedOutputPath(std::string_view name, const FixedOutputInf
StorePath Store::makeTextPath(std::string_view name, const TextInfo & info) const
{
- assert(info.hash.type == htSHA256);
+ assert(info.hash.type == HashType::SHA256);
return makeStorePath(
makeType(*this, "text", StoreReferences {
.others = info.references,
@@ -259,7 +259,7 @@ StorePath Store::computeStorePathForText(
const StorePathSet & references) const
{
return makeTextPath(name, TextInfo {
- .hash = hashString(htSHA256, s),
+ .hash = hashString(HashType::SHA256, s),
.references = references,
});
}
@@ -407,7 +407,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
FileIngestionMethod method, HashType hashAlgo,
std::optional<Hash> expectedCAHash)
{
- HashSink narHashSink { htSHA256 };
+ HashSink narHashSink { HashType::SHA256 };
HashSink caHashSink { hashAlgo };
/* Note that fileSink and unusualHashTee must be mutually exclusive, since
@@ -416,7 +416,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
RetrieveRegularNARSink fileSink { caHashSink };
TeeSink unusualHashTee { narHashSink, caHashSink };
- auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256
+ auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != HashType::SHA256
? static_cast<Sink &>(unusualHashTee)
: narHashSink;
@@ -442,7 +442,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
finish. */
auto [narHash, narSize] = narHashSink.finish();
- auto hash = method == FileIngestionMethod::Recursive && hashAlgo == htSHA256
+ auto hash = method == FileIngestionMethod::Recursive && hashAlgo == HashType::SHA256
? narHash
: caHashSink.finish().first;
@@ -854,7 +854,7 @@ std::string Store::makeValidityRegistration(const StorePathSet & paths,
auto info = queryPathInfo(i);
if (showHash) {
- s += info->narHash.to_string(Base16, false) + "\n";
+ s += info->narHash.to_string(Base::Base16, false) + "\n";
s += fmt("%1%\n", info->narSize);
}
@@ -1257,7 +1257,7 @@ std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istre
if (!hashGiven) {
std::string s;
getline(str, s);
- auto narHash = Hash::parseAny(s, htSHA256);
+ auto narHash = Hash::parseAny(s, HashType::SHA256);
getline(str, s);
auto narSize = string2Int<uint64_t>(s);
if (!narSize) throw Error("number expected");
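The hunks above are mechanical renames, but they sit on the store-path fingerprinting logic itself: makeStorePath hashes a textual fingerprint ("<type>:<algo>:<hash>:<storeDir>:<name>") with SHA-256 and truncates it to 160 bits. A minimal sketch of that derivation with the new scoped enums (assuming the updated hash.hh/path.hh headers and linking against libnixstore/libnixutil; storeDir, name, and the content hash here are made-up inputs, and this only mirrors Store::makeStorePath rather than replacing it):

    #include "hash.hh"
    #include "path.hh"
    #include <iostream>
    #include <string>

    using namespace nix;

    int main()
    {
        // Hypothetical inputs; in the store these come from the CA info and settings.
        std::string storeDir = "/nix/store";
        std::string name = "foo.tar.gz";
        Hash contentHash = hashString(HashType::SHA256, "example contents");

        // e.g. "source:sha256:1abc...:/nix/store:foo.tar.gz", as in the comment above.
        auto fingerprint = "source:" + contentHash.to_string(Base::Base16, true)
            + ":" + storeDir + ":" + name;

        // The path's digest is SHA-256 of the fingerprint, compressed to 20 bytes.
        auto h = compressHash(hashString(HashType::SHA256, fingerprint), 20);
        std::cout << StorePath(h, name).to_string() << "\n";
        return 0;
    }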
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 91a8ef2ca..25bc0c823 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -311,7 +311,7 @@ public:
*/
std::pair<StorePath, Hash> computeStorePathForPath(std::string_view name,
const Path & srcPath, FileIngestionMethod method = FileIngestionMethod::Recursive,
- HashType hashAlgo = htSHA256, PathFilter & filter = defaultPathFilter) const;
+ HashType hashAlgo = HashType::SHA256, PathFilter & filter = defaultPathFilter) const;
/**
* Preparatory part of addTextToStore().
@@ -524,7 +524,7 @@ public:
std::string_view name,
const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive,
- HashType hashAlgo = htSHA256,
+ HashType hashAlgo = HashType::SHA256,
PathFilter & filter = defaultPathFilter,
RepairFlag repair = NoRepair,
const StorePathSet & references = StorePathSet());
@@ -535,7 +535,7 @@ public:
* memory.
*/
ValidPathInfo addToStoreSlow(std::string_view name, const Path & srcPath,
- FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
+ FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256,
std::optional<Hash> expectedCAHash = {});
/**
@@ -548,7 +548,7 @@ public:
* \todo remove?
*/
virtual StorePath addToStoreFromDump(Source & dump, std::string_view name,
- FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair,
+ FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256, RepairFlag repair = NoRepair,
const StorePathSet & references = StorePathSet())
{ unsupported("addToStoreFromDump"); }
@@ -679,7 +679,7 @@ public:
*/
nlohmann::json pathInfoToJSON(const StorePathSet & storePaths,
bool includeImpureInfo, bool showClosureSize,
- Base hashBase = Base32,
+ Base hashBase = Base::Base32,
AllowInvalidFlag allowInvalid = DisallowInvalid);
/**
diff --git a/src/libstore/worker-protocol.cc b/src/libstore/worker-protocol.cc
index f85fba0ff..b9189df2b 100644
--- a/src/libstore/worker-protocol.cc
+++ b/src/libstore/worker-protocol.cc
@@ -158,7 +158,7 @@ WireFormatGenerator WorkerProto::Serialise<ValidPathInfo>::write(const Store & s
UnkeyedValidPathInfo WorkerProto::Serialise<UnkeyedValidPathInfo>::read(const Store & store, ReadConn conn)
{
auto deriver = readString(conn.from);
- auto narHash = Hash::parseAny(readString(conn.from), htSHA256);
+ auto narHash = Hash::parseAny(readString(conn.from), HashType::SHA256);
UnkeyedValidPathInfo info(narHash);
if (deriver != "") info.deriver = store.parseStorePath(deriver);
info.references = WorkerProto::Serialise<StorePathSet>::read(store, conn);
@@ -174,7 +174,7 @@ UnkeyedValidPathInfo WorkerProto::Serialise<UnkeyedValidPathInfo>::read(const St
WireFormatGenerator WorkerProto::Serialise<UnkeyedValidPathInfo>::write(const Store & store, WriteConn conn, const UnkeyedValidPathInfo & pathInfo)
{
co_yield (pathInfo.deriver ? store.printStorePath(*pathInfo.deriver) : "");
- co_yield pathInfo.narHash.to_string(Base16, false);
+ co_yield pathInfo.narHash.to_string(Base::Base16, false);
co_yield WorkerProto::write(store, conn, pathInfo.references);
co_yield pathInfo.registrationTime;
co_yield pathInfo.narSize;
diff --git a/src/libutil/args.hh b/src/libutil/args.hh
index 71f8f88fa..5fdbaba7e 100644
--- a/src/libutil/args.hh
+++ b/src/libutil/args.hh
@@ -15,7 +15,7 @@
namespace nix {
-enum HashType : char;
+enum class HashType : char;
class MultiCommand;
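The only change args.hh needs is the forward declaration: a scoped enum can be forward-declared as long as its underlying type is spelled out, so headers that merely pass HashType around never have to pull in hash.hh. A self-contained illustration of that pattern (plain C++, not nix code; the enumerator values match the new definition further down):

    #include <iostream>

    enum class HashType : char;            // opaque declaration, as in args.hh

    static void describe(HashType ht);     // fine: the full definition isn't needed yet

    // Full definition, as in hash.hh below.
    enum class HashType : char { MD5 = 42, SHA1, SHA256, SHA512 };

    static void describe(HashType ht)
    {
        // Scoped enumerators no longer convert to char implicitly; the cast is explicit.
        std::cout << "underlying value " << static_cast<int>(ht) << "\n";
    }

    int main()
    {
        describe(HashType::SHA256);        // prints "underlying value 44"
        return 0;
    }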
diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc
index 46d5425ee..9eb332e78 100644
--- a/src/libutil/hash.cc
+++ b/src/libutil/hash.cc
@@ -18,10 +18,10 @@ namespace nix {
static size_t regularHashSize(HashType type) {
switch (type) {
- case htMD5: return md5HashSize;
- case htSHA1: return sha1HashSize;
- case htSHA256: return sha256HashSize;
- case htSHA512: return sha512HashSize;
+ case HashType::MD5: return md5HashSize;
+ case HashType::SHA1: return sha1HashSize;
+ case HashType::SHA256: return sha256HashSize;
+ case HashType::SHA512: return sha512HashSize;
}
abort();
}
@@ -109,33 +109,33 @@ static std::string printHash32(const Hash & hash)
std::string printHash16or32(const Hash & hash)
{
- return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false);
+ return hash.to_string(hash.type == HashType::MD5 ? Base::Base16 : Base::Base32, false);
}
std::string Hash::to_string(Base base, bool includeType) const
{
std::string s;
- if (base == SRI || includeType) {
+ if (base == Base::SRI || includeType) {
s += printHashType(type);
- s += base == SRI ? '-' : ':';
+ s += base == Base::SRI ? '-' : ':';
}
switch (base) {
- case Base16:
+ case Base::Base16:
s += printHash16(*this);
break;
- case Base32:
+ case Base::Base32:
s += printHash32(*this);
break;
- case Base64:
- case SRI:
+ case Base::Base64:
+ case Base::SRI:
s += base64Encode(std::string_view((const char *) hash, hashSize));
break;
}
return s;
}
-Hash Hash::dummy(htSHA256);
+Hash Hash::dummy(HashType::SHA256);
Hash Hash::parseSRI(std::string_view original) {
auto rest = original;
@@ -265,7 +265,7 @@ Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashType> ht)
if (!ht)
throw BadHash("empty hash requires explicit hash type");
Hash h(*ht);
- warn("found empty hash, assuming '%s'", h.to_string(SRI, true));
+ warn("found empty hash, assuming '%s'", h.to_string(Base::SRI, true));
return h;
} else
return Hash::parseAny(hashStr, ht);
@@ -283,29 +283,29 @@ union Ctx
static void start(HashType ht, Ctx & ctx)
{
- if (ht == htMD5) MD5_Init(&ctx.md5);
- else if (ht == htSHA1) SHA1_Init(&ctx.sha1);
- else if (ht == htSHA256) SHA256_Init(&ctx.sha256);
- else if (ht == htSHA512) SHA512_Init(&ctx.sha512);
+ if (ht == HashType::MD5) MD5_Init(&ctx.md5);
+ else if (ht == HashType::SHA1) SHA1_Init(&ctx.sha1);
+ else if (ht == HashType::SHA256) SHA256_Init(&ctx.sha256);
+ else if (ht == HashType::SHA512) SHA512_Init(&ctx.sha512);
}
static void update(HashType ht, Ctx & ctx,
std::string_view data)
{
- if (ht == htMD5) MD5_Update(&ctx.md5, data.data(), data.size());
- else if (ht == htSHA1) SHA1_Update(&ctx.sha1, data.data(), data.size());
- else if (ht == htSHA256) SHA256_Update(&ctx.sha256, data.data(), data.size());
- else if (ht == htSHA512) SHA512_Update(&ctx.sha512, data.data(), data.size());
+ if (ht == HashType::MD5) MD5_Update(&ctx.md5, data.data(), data.size());
+ else if (ht == HashType::SHA1) SHA1_Update(&ctx.sha1, data.data(), data.size());
+ else if (ht == HashType::SHA256) SHA256_Update(&ctx.sha256, data.data(), data.size());
+ else if (ht == HashType::SHA512) SHA512_Update(&ctx.sha512, data.data(), data.size());
}
static void finish(HashType ht, Ctx & ctx, unsigned char * hash)
{
- if (ht == htMD5) MD5_Final(hash, &ctx.md5);
- else if (ht == htSHA1) SHA1_Final(hash, &ctx.sha1);
- else if (ht == htSHA256) SHA256_Final(hash, &ctx.sha256);
- else if (ht == htSHA512) SHA512_Final(hash, &ctx.sha512);
+ if (ht == HashType::MD5) MD5_Final(hash, &ctx.md5);
+ else if (ht == HashType::SHA1) SHA1_Final(hash, &ctx.sha1);
+ else if (ht == HashType::SHA256) SHA256_Final(hash, &ctx.sha256);
+ else if (ht == HashType::SHA512) SHA512_Final(hash, &ctx.sha512);
}
@@ -386,10 +386,10 @@ Hash compressHash(const Hash & hash, unsigned int newSize)
std::optional<HashType> parseHashTypeOpt(std::string_view s)
{
- if (s == "md5") return htMD5;
- else if (s == "sha1") return htSHA1;
- else if (s == "sha256") return htSHA256;
- else if (s == "sha512") return htSHA512;
+ if (s == "md5") return HashType::MD5;
+ else if (s == "sha1") return HashType::SHA1;
+ else if (s == "sha256") return HashType::SHA256;
+ else if (s == "sha512") return HashType::SHA512;
else return std::optional<HashType> {};
}
@@ -405,10 +405,10 @@ HashType parseHashType(std::string_view s)
std::string_view printHashType(HashType ht)
{
switch (ht) {
- case htMD5: return "md5";
- case htSHA1: return "sha1";
- case htSHA256: return "sha256";
- case htSHA512: return "sha512";
+ case HashType::MD5: return "md5";
+ case HashType::SHA1: return "sha1";
+ case HashType::SHA256: return "sha256";
+ case HashType::SHA512: return "sha512";
default:
// illegal hash type enum value internally, as opposed to external input
// which should be validated with nice error message.
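With the enumerators scoped, the parse/print helpers above form a simple round trip: parseHashTypeOpt maps a user-facing name to a HashType (or an empty optional), and printHashType maps it back via an exhaustive switch. A short sketch of how call sites use them now (assuming the updated hash.hh and libnixutil):

    #include "hash.hh"
    #include <cassert>
    #include <iostream>

    using namespace nix;

    int main()
    {
        // String -> enum: unknown names yield std::nullopt instead of throwing.
        if (auto ht = parseHashTypeOpt("sha256")) {
            assert(*ht == HashType::SHA256);
            // Enum -> string: the switch in printHashType covers every enumerator.
            std::cout << printHashType(*ht) << "\n";   // "sha256"
        }

        assert(!parseHashTypeOpt("sha3-256"));         // not a supported algorithm here
        return 0;
    }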
diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh
index 1c2f8493b..47e970e17 100644
--- a/src/libutil/hash.hh
+++ b/src/libutil/hash.hh
@@ -12,7 +12,7 @@ namespace nix {
MakeError(BadHash, Error);
-enum HashType : char { htMD5 = 42, htSHA1, htSHA256, htSHA512 };
+enum class HashType : char { MD5 = 42, SHA1, SHA256, SHA512 };
const int md5HashSize = 16;
@@ -24,7 +24,7 @@ extern std::set<std::string> hashTypes;
extern const std::string base32Chars;
-enum Base : int { Base64, Base32, Base16, SRI };
+enum class Base : int { Base64, Base32, Base16, SRI };
struct Hash
@@ -119,12 +119,12 @@ public:
std::string gitRev() const
{
- return to_string(Base16, false);
+ return to_string(Base::Base16, false);
}
std::string gitShortRev() const
{
- return std::string(to_string(Base16, false), 0, 7);
+ return std::string(to_string(Base::Base16, false), 0, 7);
}
static Hash dummy;
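For downstream code, the practical effect of the two enum class definitions is that every call site spells out HashType:: and Base::, exactly as the call-site hunks in this commit do. A minimal usage sketch (assuming the updated hash.hh and libnixutil; the digest is the RFC 4634 "abc" vector that the unit tests below also check):

    #include "hash.hh"
    #include <iostream>

    using namespace nix;

    int main()
    {
        Hash h = hashString(HashType::SHA256, "abc");

        std::cout << h.to_string(Base::Base16, true) << "\n";
        // sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad
        std::cout << h.to_string(Base::Base32, false) << "\n";  // Nix-specific base-32, digest only
        std::cout << h.to_string(Base::SRI, true) << "\n";      // "sha256-" + base-64 digest

        // The old unscoped spellings (htSHA256, Base16, ...) no longer compile.
        return 0;
    }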
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index 8d9a49536..5762b0644 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -404,8 +404,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) {
auto info = store->queryPathInfo(j);
if (query == qHash) {
- assert(info->narHash.type == htSHA256);
- cout << fmt("%s\n", info->narHash.to_string(Base32, true));
+ assert(info->narHash.type == HashType::SHA256);
+ cout << fmt("%s\n", info->narHash.to_string(Base::Base32, true));
} else if (query == qSize)
cout << fmt("%d\n", info->narSize);
}
@@ -540,7 +540,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
if (canonicalise)
canonicalisePathMetaData(store->printStorePath(info->path), {});
if (!hashGiven) {
- HashResult hash = hashPath(htSHA256, store->printStorePath(info->path));
+ HashResult hash = hashPath(HashType::SHA256, store->printStorePath(info->path));
info->narHash = hash.first;
info->narSize = hash.second;
}
@@ -768,8 +768,8 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
if (current.first != info->narHash) {
printError("path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(path),
- info->narHash.to_string(Base32, true),
- current.first.to_string(Base32, true));
+ info->narHash.to_string(Base::Base32, true),
+ current.first.to_string(Base::Base32, true));
status = 1;
}
}
@@ -970,7 +970,7 @@ static void opServe(Strings opFlags, Strings opArgs)
auto deriver = readString(in);
ValidPathInfo info {
store->parseStorePath(path),
- Hash::parseAny(readString(in), htSHA256),
+ Hash::parseAny(readString(in), HashType::SHA256),
};
if (deriver != "")
info.deriver = store->parseStorePath(deriver);
diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc
index 7e16e12c3..b9f7ece89 100644
--- a/src/nix/add-to-store.cc
+++ b/src/nix/add-to-store.cc
@@ -32,11 +32,11 @@ struct CmdAddToStore : MixDryRun, StoreCommand
StringSink sink;
sink << dumpPath(path);
- auto narHash = hashString(htSHA256, sink.s);
+ auto narHash = hashString(HashType::SHA256, sink.s);
Hash hash = narHash;
if (ingestionMethod == FileIngestionMethod::Flat) {
- HashSink hsink(htSHA256);
+ HashSink hsink(HashType::SHA256);
hsink << readFileSource(path);
hash = hsink.finish().first;
}
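The two branches above are the flat-versus-recursive ingestion split: flat hashes the raw file bytes, while recursive hashes the NAR serialisation of the whole path. A hedged sketch of that distinction using the convenience helpers declared in hash.hh (hashFile and hashPath; assumes libnixutil and an existing path passed on the command line):

    #include "hash.hh"
    #include <iostream>
    #include <string>

    using namespace nix;

    int main(int argc, char * * argv)
    {
        if (argc < 2) return 1;
        std::string path = argv[1];   // hypothetical input path

        // Flat: hash the file contents directly (FileIngestionMethod::Flat).
        Hash flat = hashFile(HashType::SHA256, path);

        // Recursive: hash the NAR dump of the path (FileIngestionMethod::Recursive).
        auto [narHash, narSize] = hashPath(HashType::SHA256, path);

        std::cout << "flat:      " << flat.to_string(Base::SRI, true) << "\n";
        std::cout << "recursive: " << narHash.to_string(Base::SRI, true)
                  << " (" << narSize << " bytes of NAR)\n";
        return 0;
    }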
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index 672930342..5ea9e077b 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -226,7 +226,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
j["url"] = flake.lockedRef.to_string(); // FIXME: rename to lockedUrl
j["locked"] = fetchers::attrsToJSON(flake.lockedRef.toAttrs());
if (auto rev = flake.lockedRef.input.getRev())
- j["revision"] = rev->to_string(Base16, false);
+ j["revision"] = rev->to_string(Base::Base16, false);
if (auto dirtyRev = fetchers::maybeGetStrAttr(flake.lockedRef.toAttrs(), "dirtyRev"))
j["dirtyRevision"] = *dirtyRev;
if (auto revCount = flake.lockedRef.input.getRevCount())
@@ -253,7 +253,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
if (auto rev = flake.lockedRef.input.getRev())
logger->cout(
ANSI_BOLD "Revision:" ANSI_NORMAL " %s",
- rev->to_string(Base16, false));
+ rev->to_string(Base::Base16, false));
if (auto dirtyRev = fetchers::maybeGetStrAttr(flake.lockedRef.toAttrs(), "dirtyRev"))
logger->cout(
ANSI_BOLD "Revision:" ANSI_NORMAL " %s",
@@ -1434,13 +1434,13 @@ struct CmdFlakePrefetch : FlakeCommand, MixJSON
if (json) {
auto res = nlohmann::json::object();
res["storePath"] = store->printStorePath(tree.storePath);
- res["hash"] = hash.to_string(SRI, true);
+ res["hash"] = hash.to_string(Base::SRI, true);
logger->cout(res.dump());
} else {
notice("Downloaded '%s' to '%s' (hash '%s').",
lockedRef.to_string(),
store->printStorePath(tree.storePath),
- hash.to_string(SRI, true));
+ hash.to_string(Base::SRI, true));
}
}
};
diff --git a/src/nix/hash.cc b/src/nix/hash.cc
index 12b66a83c..f6add527a 100644
--- a/src/nix/hash.cc
+++ b/src/nix/hash.cc
@@ -11,9 +11,9 @@ using namespace nix;
struct CmdHashBase : Command
{
FileIngestionMethod mode;
- Base base = SRI;
+ Base base = Base::SRI;
bool truncate = false;
- HashType ht = htSHA256;
+ HashType ht = HashType::SHA256;
std::vector<std::string> paths;
std::optional<std::string> modulus;
@@ -22,25 +22,25 @@ struct CmdHashBase : Command
addFlag({
.longName = "sri",
.description = "Print the hash in SRI format.",
- .handler = {&base, SRI},
+ .handler = {&base, Base::SRI},
});
addFlag({
.longName = "base64",
.description = "Print the hash in base-64 format.",
- .handler = {&base, Base64},
+ .handler = {&base, Base::Base64},
});
addFlag({
.longName = "base32",
.description = "Print the hash in base-32 (Nix-specific) format.",
- .handler = {&base, Base32},
+ .handler = {&base, Base::Base32},
});
addFlag({
.longName = "base16",
.description = "Print the hash in base-16 format.",
- .handler = {&base, Base16},
+ .handler = {&base, Base::Base16},
});
addFlag(Flag::mkHashTypeFlag("type", &ht));
@@ -90,7 +90,7 @@ struct CmdHashBase : Command
? computeHashModulo(ht, *modulus, source).first
: hashSource(ht, source).first;
if (truncate && h.hashSize > 20) h = compressHash(h, 20);
- logger->cout(h.to_string(base, base == SRI));
+ logger->cout(h.to_string(base, base == Base::SRI));
}
}
};
@@ -110,16 +110,16 @@ struct CmdToBase : Command
std::string description() override
{
return fmt("convert a hash to %s representation",
- base == Base16 ? "base-16" :
- base == Base32 ? "base-32" :
- base == Base64 ? "base-64" :
+ base == Base::Base16 ? "base-16" :
+ base == Base::Base32 ? "base-32" :
+ base == Base::Base64 ? "base-64" :
"SRI");
}
void run() override
{
for (auto s : args)
- logger->cout(Hash::parseAny(s, ht).to_string(base, base == SRI));
+ logger->cout(Hash::parseAny(s, ht).to_string(base, base == Base::SRI));
}
};
@@ -129,10 +129,10 @@ struct CmdHash : NixMultiCommand
: MultiCommand({
{"file", []() { return make_ref<CmdHashBase>(FileIngestionMethod::Flat);; }},
{"path", []() { return make_ref<CmdHashBase>(FileIngestionMethod::Recursive); }},
- {"to-base16", []() { return make_ref<CmdToBase>(Base16); }},
- {"to-base32", []() { return make_ref<CmdToBase>(Base32); }},
- {"to-base64", []() { return make_ref<CmdToBase>(Base64); }},
- {"to-sri", []() { return make_ref<CmdToBase>(SRI); }},
+ {"to-base16", []() { return make_ref<CmdToBase>(Base::Base16); }},
+ {"to-base32", []() { return make_ref<CmdToBase>(Base::Base32); }},
+ {"to-base64", []() { return make_ref<CmdToBase>(Base::Base64); }},
+ {"to-sri", []() { return make_ref<CmdToBase>(Base::SRI); }},
})
{ }
@@ -158,7 +158,7 @@ static int compatNixHash(int argc, char * * argv)
{
std::optional<HashType> ht;
bool flat = false;
- Base base = Base16;
+ Base base = Base::Base16;
bool truncate = false;
enum { opHash, opTo } op = opHash;
std::vector<std::string> ss;
@@ -169,10 +169,10 @@ static int compatNixHash(int argc, char * * argv)
else if (*arg == "--version")
printVersion("nix-hash");
else if (*arg == "--flat") flat = true;
- else if (*arg == "--base16") base = Base16;
- else if (*arg == "--base32") base = Base32;
- else if (*arg == "--base64") base = Base64;
- else if (*arg == "--sri") base = SRI;
+ else if (*arg == "--base16") base = Base::Base16;
+ else if (*arg == "--base32") base = Base::Base32;
+ else if (*arg == "--base64") base = Base::Base64;
+ else if (*arg == "--sri") base = Base::SRI;
else if (*arg == "--truncate") truncate = true;
else if (*arg == "--type") {
std::string s = getArg(*arg, arg, end);
@@ -180,19 +180,19 @@ static int compatNixHash(int argc, char * * argv)
}
else if (*arg == "--to-base16") {
op = opTo;
- base = Base16;
+ base = Base::Base16;
}
else if (*arg == "--to-base32") {
op = opTo;
- base = Base32;
+ base = Base::Base32;
}
else if (*arg == "--to-base64") {
op = opTo;
- base = Base64;
+ base = Base::Base64;
}
else if (*arg == "--to-sri") {
op = opTo;
- base = SRI;
+ base = Base::SRI;
}
else if (*arg != "" && arg->at(0) == '-')
return false;
@@ -203,7 +203,7 @@ static int compatNixHash(int argc, char * * argv)
if (op == opHash) {
CmdHashBase cmd(flat ? FileIngestionMethod::Flat : FileIngestionMethod::Recursive);
- if (!ht.has_value()) ht = htMD5;
+ if (!ht.has_value()) ht = HashType::MD5;
cmd.ht = ht.value();
cmd.base = base;
cmd.truncate = truncate;
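Both CmdToBase and the nix-hash compatibility path reduce to the same two calls: Hash::parseAny to read a hash in any supported representation, then Hash::to_string with the requested Base. A rough equivalent of `nix hash to-base32 --type sha256 <hash>` (assuming the updated hash.hh and libnixutil; the input is the same "abc" digest as above):

    #include "hash.hh"
    #include <iostream>

    using namespace nix;

    int main()
    {
        auto h = Hash::parseAny(
            "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad",
            HashType::SHA256);

        // SRI is the only representation that always carries the algorithm name,
        // hence the `base == Base::SRI` second argument in the command above.
        std::cout << h.to_string(Base::Base32, false) << "\n";
        std::cout << h.to_string(Base::SRI, true) << "\n";
        // sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=
        return 0;
    }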
diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc
index 613c5b191..b14eef467 100644
--- a/src/nix/path-info.cc
+++ b/src/nix/path-info.cc
@@ -90,7 +90,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
std::cout << store->pathInfoToJSON(
// FIXME: preserve order?
StorePathSet(storePaths.begin(), storePaths.end()),
- true, showClosureSize, SRI, AllowInvalid).dump();
+ true, showClosureSize, Base::SRI, AllowInvalid).dump();
}
else {
diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc
index 13d94d645..0b04a04e6 100644
--- a/src/nix/prefetch.cc
+++ b/src/nix/prefetch.cc
@@ -133,7 +133,7 @@ std::tuple<StorePath, Hash> prefetchFile(
static int main_nix_prefetch_url(int argc, char * * argv)
{
{
- HashType ht = htSHA256;
+ HashType ht = HashType::SHA256;
std::vector<std::string> args;
bool printPath = getEnv("PRINT_PATH") == "1";
bool fromExpr = false;
@@ -256,7 +256,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON
bool executable = false;
bool unpack = false;
std::optional<std::string> name;
- HashType hashType = htSHA256;
+ HashType hashType = HashType::SHA256;
std::optional<Hash> expectedHash;
CmdStorePrefetchFile()
@@ -316,13 +316,13 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON
if (json) {
auto res = nlohmann::json::object();
res["storePath"] = store->printStorePath(storePath);
- res["hash"] = hash.to_string(SRI, true);
+ res["hash"] = hash.to_string(Base::SRI, true);
logger->cout(res.dump());
} else {
notice("Downloaded '%s' to '%s' (hash '%s').",
url,
store->printStorePath(storePath),
- hash.to_string(SRI, true));
+ hash.to_string(Base::SRI, true));
}
}
};
diff --git a/src/nix/verify.cc b/src/nix/verify.cc
index eb68e67bc..8783d4e04 100644
--- a/src/nix/verify.cc
+++ b/src/nix/verify.cc
@@ -109,8 +109,8 @@ struct CmdVerify : StorePathsCommand
act2.result(resCorruptedPath, store->printStorePath(info->path));
printError("path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(info->path),
- info->narHash.to_string(Base32, true),
- hash.first.to_string(Base32, true));
+ info->narHash.to_string(Base::Base32, true),
+ hash.first.to_string(Base::Base32, true));
}
}
diff --git a/tests/unit/libstore/common-protocol.cc b/tests/unit/libstore/common-protocol.cc
index 86ba537db..9523c9acb 100644
--- a/tests/unit/libstore/common-protocol.cc
+++ b/tests/unit/libstore/common-protocol.cc
@@ -102,15 +102,15 @@ CHARACTERIZATION_TEST(
(std::tuple<ContentAddress, ContentAddress, ContentAddress> {
ContentAddress {
.method = TextIngestionMethod {},
- .hash = hashString(HashType::htSHA256, "Derive(...)"),
+ .hash = hashString(HashType::SHA256, "Derive(...)"),
},
ContentAddress {
.method = FileIngestionMethod::Flat,
- .hash = hashString(HashType::htSHA1, "blob blob..."),
+ .hash = hashString(HashType::SHA1, "blob blob..."),
},
ContentAddress {
.method = FileIngestionMethod::Recursive,
- .hash = hashString(HashType::htSHA256, "(...)"),
+ .hash = hashString(HashType::SHA256, "(...)"),
},
}))
@@ -197,7 +197,7 @@ CHARACTERIZATION_TEST(
std::optional {
ContentAddress {
.method = FileIngestionMethod::Flat,
- .hash = hashString(HashType::htSHA1, "blob blob..."),
+ .hash = hashString(HashType::SHA1, "blob blob..."),
},
},
}))
diff --git a/tests/unit/libstore/derivation.cc b/tests/unit/libstore/derivation.cc
index ca0cdff71..f6401a095 100644
--- a/tests/unit/libstore/derivation.cc
+++ b/tests/unit/libstore/derivation.cc
@@ -148,7 +148,7 @@ TEST_JSON(DynDerivationTest, caFixedText,
TEST_JSON(CaDerivationTest, caFloating,
(DerivationOutput::CAFloating {
.method = FileIngestionMethod::Recursive,
- .hashType = htSHA256,
+ .hashType = HashType::SHA256,
}),
"drv-name", "output-name")
@@ -159,7 +159,7 @@ TEST_JSON(DerivationTest, deferred,
TEST_JSON(ImpureDerivationTest, impure,
(DerivationOutput::Impure {
.method = FileIngestionMethod::Recursive,
- .hashType = htSHA256,
+ .hashType = HashType::SHA256,
}),
"drv-name", "output-name")
diff --git a/tests/unit/libstore/serve-protocol.cc b/tests/unit/libstore/serve-protocol.cc
index 6cc60ca3e..c6793be74 100644
--- a/tests/unit/libstore/serve-protocol.cc
+++ b/tests/unit/libstore/serve-protocol.cc
@@ -53,15 +53,15 @@ VERSIONED_CHARACTERIZATION_TEST(
(std::tuple<ContentAddress, ContentAddress, ContentAddress> {
ContentAddress {
.method = TextIngestionMethod {},
- .hash = hashString(HashType::htSHA256, "Derive(...)"),
+ .hash = hashString(HashType::SHA256, "Derive(...)"),
},
ContentAddress {
.method = FileIngestionMethod::Flat,
- .hash = hashString(HashType::htSHA1, "blob blob..."),
+ .hash = hashString(HashType::SHA1, "blob blob..."),
},
ContentAddress {
.method = FileIngestionMethod::Recursive,
- .hash = hashString(HashType::htSHA256, "(...)"),
+ .hash = hashString(HashType::SHA256, "(...)"),
},
}))
@@ -278,7 +278,7 @@ VERSIONED_CHARACTERIZATION_TEST(
"foo",
FixedOutputInfo {
.method = FileIngestionMethod::Recursive,
- .hash = hashString(HashType::htSHA256, "(...)"),
+ .hash = hashString(HashType::SHA256, "(...)"),
.references = {
.others = {
StorePath {
@@ -348,7 +348,7 @@ VERSIONED_CHARACTERIZATION_TEST(
std::optional {
ContentAddress {
.method = FileIngestionMethod::Flat,
- .hash = hashString(HashType::htSHA1, "blob blob..."),
+ .hash = hashString(HashType::SHA1, "blob blob..."),
},
},
}))
diff --git a/tests/unit/libstore/worker-protocol.cc b/tests/unit/libstore/worker-protocol.cc
index f36de9cf9..92a33f595 100644
--- a/tests/unit/libstore/worker-protocol.cc
+++ b/tests/unit/libstore/worker-protocol.cc
@@ -55,15 +55,15 @@ VERSIONED_CHARACTERIZATION_TEST(
(std::tuple<ContentAddress, ContentAddress, ContentAddress> {
ContentAddress {
.method = TextIngestionMethod {},
- .hash = hashString(HashType::htSHA256, "Derive(...)"),
+ .hash = hashString(HashType::SHA256, "Derive(...)"),
},
ContentAddress {
.method = FileIngestionMethod::Flat,
- .hash = hashString(HashType::htSHA1, "blob blob..."),
+ .hash = hashString(HashType::SHA1, "blob blob..."),
},
ContentAddress {
.method = FileIngestionMethod::Recursive,
- .hash = hashString(HashType::htSHA256, "(...)"),
+ .hash = hashString(HashType::SHA256, "(...)"),
},
}))
@@ -417,7 +417,7 @@ VERSIONED_CHARACTERIZATION_TEST(
"foo",
FixedOutputInfo {
.method = FileIngestionMethod::Recursive,
- .hash = hashString(HashType::htSHA256, "(...)"),
+ .hash = hashString(HashType::SHA256, "(...)"),
.references = {
.others = {
StorePath {
@@ -492,7 +492,7 @@ VERSIONED_CHARACTERIZATION_TEST(
std::optional {
ContentAddress {
.method = FileIngestionMethod::Flat,
- .hash = hashString(HashType::htSHA1, "blob blob..."),
+ .hash = hashString(HashType::SHA1, "blob blob..."),
},
},
}))
diff --git a/tests/unit/libutil-support/tests/hash.cc b/tests/unit/libutil-support/tests/hash.cc
index 7cc994b40..1ff31ae6a 100644
--- a/tests/unit/libutil-support/tests/hash.cc
+++ b/tests/unit/libutil-support/tests/hash.cc
@@ -11,11 +11,11 @@ using namespace nix;
Gen<Hash> Arbitrary<Hash>::arbitrary()
{
- Hash prototype(htSHA1);
+ Hash prototype(HashType::SHA1);
return
gen::apply(
[](const std::vector<uint8_t> & v) {
- Hash hash(htSHA1);
+ Hash hash(HashType::SHA1);
assert(v.size() == hash.hashSize);
std::copy(v.begin(), v.end(), hash.hash);
return hash;
diff --git a/tests/unit/libutil/hash.cc b/tests/unit/libutil/hash.cc
index 1f40edc95..6c24eda10 100644
--- a/tests/unit/libutil/hash.cc
+++ b/tests/unit/libutil/hash.cc
@@ -13,28 +13,28 @@ namespace nix {
TEST(hashString, testKnownMD5Hashes1) {
// values taken from: https://tools.ietf.org/html/rfc1321
auto s1 = "";
- auto hash = hashString(HashType::htMD5, s1);
+ auto hash = hashString(HashType::MD5, s1);
ASSERT_EQ(hash.to_string(Base::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e");
}
TEST(hashString, testKnownMD5Hashes2) {
// values taken from: https://tools.ietf.org/html/rfc1321
auto s2 = "abc";
- auto hash = hashString(HashType::htMD5, s2);
+ auto hash = hashString(HashType::MD5, s2);
ASSERT_EQ(hash.to_string(Base::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72");
}
TEST(hashString, testKnownSHA1Hashes1) {
// values taken from: https://tools.ietf.org/html/rfc3174
auto s = "abc";
- auto hash = hashString(HashType::htSHA1, s);
+ auto hash = hashString(HashType::SHA1, s);
ASSERT_EQ(hash.to_string(Base::Base16, true),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d");
}
TEST(hashString, testKnownSHA1Hashes2) {
// values taken from: https://tools.ietf.org/html/rfc3174
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
- auto hash = hashString(HashType::htSHA1, s);
+ auto hash = hashString(HashType::SHA1, s);
ASSERT_EQ(hash.to_string(Base::Base16, true),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1");
}
@@ -42,7 +42,7 @@ namespace nix {
// values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abc";
- auto hash = hashString(HashType::htSHA256, s);
+ auto hash = hashString(HashType::SHA256, s);
ASSERT_EQ(hash.to_string(Base::Base16, true),
"sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad");
}
@@ -50,7 +50,7 @@ namespace nix {
TEST(hashString, testKnownSHA256Hashes2) {
// values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
- auto hash = hashString(HashType::htSHA256, s);
+ auto hash = hashString(HashType::SHA256, s);
ASSERT_EQ(hash.to_string(Base::Base16, true),
"sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1");
}
@@ -58,7 +58,7 @@ namespace nix {
TEST(hashString, testKnownSHA512Hashes1) {
// values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abc";
- auto hash = hashString(HashType::htSHA512, s);
+ auto hash = hashString(HashType::SHA512, s);
ASSERT_EQ(hash.to_string(Base::Base16, true),
"sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9"
"7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd"
@@ -68,7 +68,7 @@ namespace nix {
// values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu";
- auto hash = hashString(HashType::htSHA512, s);
+ auto hash = hashString(HashType::SHA512, s);
ASSERT_EQ(hash.to_string(Base::Base16, true),
"sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1"
"7299aeadb6889018501d289e4900f7e4331b99dec4b5433a"