about summary refs log tree commit diff
path: root/src/libstore/s3-binary-cache-store.cc
diff options
context:
space:
mode:
authorEelco Dolstra <edolstra@gmail.com>2018-08-06 15:40:29 +0200
committerEelco Dolstra <edolstra@gmail.com>2018-08-06 15:40:29 +0200
commitd3761f5f8bce1e4c8dcfdff3fa77c173157c0346 (patch)
tree1dae65410f1977d58ad7aa7741121cbe74c33da3 /src/libstore/s3-binary-cache-store.cc
parentfa4def3d4675c8b2d6aacb56959dbbf9e52df66a (diff)
Fix Brotli decompression in 'nix log'
This didn't work anymore since decompression was only done in the non-coroutine case. Decompressors are now sinks, just like compressors. Also fixed a bug in bzip2 API handling (we have to handle BZ_RUN_OK rather than BZ_OK), which we didn't notice because there was a missing 'throw': if (ret != BZ_OK) CompressionError("error while compressing bzip2 file");
Diffstat (limited to 'src/libstore/s3-binary-cache-store.cc')
-rw-r--r--src/libstore/s3-binary-cache-store.cc6
1 file changed, 2 insertions(+), 4 deletions(-)
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
index 2f18e3f38..660583d31 100644
--- a/src/libstore/s3-binary-cache-store.cc
+++ b/src/libstore/s3-binary-cache-store.cc
@@ -153,10 +153,8 @@ S3Helper::DownloadResult S3Helper::getObject(
auto result = checkAws(fmt("AWS error fetching '%s'", key),
client->GetObject(request));
- res.data = decodeContent(
- result.GetContentEncoding(),
- make_ref<std::string>(
- dynamic_cast<std::stringstream &>(result.GetBody()).str()));
+ res.data = decompress(result.GetContentEncoding(),
+ dynamic_cast<std::stringstream &>(result.GetBody()).str());
} catch (S3Error & e) {
if (e.err != Aws::S3::S3Errors::NO_SUCH_KEY) throw;