author | Carlo Nucera <carlo.nucera@protonmail.com> | 2020-07-15 17:58:30 -0400 |
---|---|---|
committer | Carlo Nucera <carlo.nucera@protonmail.com> | 2020-07-15 17:58:30 -0400 |
commit | 455bdee2056858b223b39aa86822fd83580038db (patch) | |
tree | cc9ddb46caf5f77d611152a2e95d9639d200c011 /src/libstore/s3-binary-cache-store.cc | |
parent | f1c7746eb407258d77b2f7fffec0e5d4facf516a (diff) | |
parent | 36a124260361ba8dfa43bf43a067dcc48064c93f (diff) | |
Merge branch 'master' of github.com:NixOS/nix into derivation-header-include-order
Diffstat (limited to 'src/libstore/s3-binary-cache-store.cc')
-rw-r--r-- | src/libstore/s3-binary-cache-store.cc | 39 |
1 file changed, 22 insertions, 17 deletions
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
index 427dd48ce..1b7dff085 100644
--- a/src/libstore/s3-binary-cache-store.cc
+++ b/src/libstore/s3-binary-cache-store.cc
@@ -7,7 +7,6 @@
 #include "globals.hh"
 #include "compression.hh"
 #include "filetransfer.hh"
-#include "istringstream_nocopy.hh"
 
 #include <aws/core/Aws.h>
 #include <aws/core/VersionConfig.h>
@@ -262,12 +261,11 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
     std::shared_ptr<TransferManager> transferManager;
     std::once_flag transferManagerCreated;
 
-    void uploadFile(const std::string & path, const std::string & data,
+    void uploadFile(const std::string & path,
+        std::shared_ptr<std::basic_iostream<char>> istream,
         const std::string & mimeType,
         const std::string & contentEncoding)
     {
-        auto stream = std::make_shared<istringstream_nocopy>(data);
-
         auto maxThreads = std::thread::hardware_concurrency();
 
         static std::shared_ptr<Aws::Utils::Threading::PooledThreadExecutor>
@@ -307,7 +305,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
 
             std::shared_ptr<TransferHandle> transferHandle =
                 transferManager->UploadFile(
-                    stream, bucketName, path, mimeType,
+                    istream, bucketName, path, mimeType,
                     Aws::Map<Aws::String, Aws::String>(),
                     nullptr /*, contentEncoding */);
 
@@ -333,9 +331,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
             if (contentEncoding != "")
                 request.SetContentEncoding(contentEncoding);
 
-            auto stream = std::make_shared<istringstream_nocopy>(data);
-
-            request.SetBody(stream);
+            request.SetBody(istream);
 
             auto result = checkAws(fmt("AWS error uploading '%s'", path),
                 s3Helper.client->PutObject(request));
@@ -347,25 +343,34 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
             std::chrono::duration_cast<std::chrono::milliseconds>(now2 - now1)
                 .count();
 
-        printInfo(format("uploaded 's3://%1%/%2%' (%3% bytes) in %4% ms") %
-            bucketName % path % data.size() % duration);
+        auto size = istream->tellg();
+
+        printInfo("uploaded 's3://%s/%s' (%d bytes) in %d ms",
+            bucketName, path, size, duration);
 
         stats.putTimeMs += duration;
-        stats.putBytes += data.size();
+        stats.putBytes += size;
         stats.put++;
     }
 
-    void upsertFile(const std::string & path, const std::string & data,
+    void upsertFile(const std::string & path,
+        std::shared_ptr<std::basic_iostream<char>> istream,
         const std::string & mimeType) override
     {
+        auto compress = [&](std::string compression)
+        {
+            auto compressed = nix::compress(compression, StreamToSourceAdapter(istream).drain());
+            return std::make_shared<std::stringstream>(std::move(*compressed));
+        };
+
         if (narinfoCompression != "" && hasSuffix(path, ".narinfo"))
-            uploadFile(path, *compress(narinfoCompression, data), mimeType, narinfoCompression);
+            uploadFile(path, compress(narinfoCompression), mimeType, narinfoCompression);
         else if (lsCompression != "" && hasSuffix(path, ".ls"))
-            uploadFile(path, *compress(lsCompression, data), mimeType, lsCompression);
+            uploadFile(path, compress(lsCompression), mimeType, lsCompression);
         else if (logCompression != "" && hasPrefix(path, "log/"))
-            uploadFile(path, *compress(logCompression, data), mimeType, logCompression);
+            uploadFile(path, compress(logCompression), mimeType, logCompression);
         else
-            uploadFile(path, data, mimeType, "");
+            uploadFile(path, istream, mimeType, "");
     }
 
     void getFile(const std::string & path, Sink & sink) override
@@ -410,7 +415,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
             for (auto object : contents) {
                 auto & key = object.GetKey();
                 if (key.size() != 40 || !hasSuffix(key, ".narinfo")) continue;
-                paths.insert(parseStorePath(storeDir + "/" + key.substr(0, key.size() - 8) + "-unknown"));
+                paths.insert(parseStorePath(storeDir + "/" + key.substr(0, key.size() - 8) + "-" + MissingName));
            }
 
             marker = res.GetNextMarker();
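The patch replaces the string-based `uploadFile`/`upsertFile` interface with one that accepts a `std::shared_ptr<std::basic_iostream<char>>`. Below is a minimal, self-contained sketch (not part of the patch) of how in-memory data can be adapted to such a stream-based signature; `doUpload` is a hypothetical stand-in for the real S3 upload path, and the path and mime type are purely illustrative.

```cpp
// Sketch only: shows the shared_ptr<std::stringstream> -> shared_ptr<std::basic_iostream<char>>
// adaptation used by the new stream-based upload signature in the diff above.
#include <iostream>
#include <memory>
#include <sstream>
#include <string>

// Hypothetical stand-in for the stream-consuming upload function.
void doUpload(const std::string & path,
    std::shared_ptr<std::basic_iostream<char>> istream,
    const std::string & mimeType)
{
    // Mirror the size accounting added by the patch (istream->tellg()):
    // seek to the end and read the position to get the payload size.
    istream->seekg(0, std::ios::end);
    auto size = static_cast<std::streamoff>(istream->tellg());
    std::cout << "would upload " << size << " bytes to " << path
              << " (" << mimeType << ")\n";
}

int main()
{
    std::string data = "StorePath: /nix/store/...\n";

    // The old API took the string directly; the stream-based API wants an
    // iostream, so the data is wrapped in a shared std::stringstream.
    auto istream = std::make_shared<std::stringstream>(data);

    doUpload("example.narinfo", istream, "text/x-nix-narinfo");
    return 0;
}
```

The same derived-to-base `shared_ptr` conversion is what lets the patch pass the `std::stringstream` produced by its `compress` lambda wherever a `std::basic_iostream<char>` is expected.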