Don't freak out if we get a 403 from S3
As far as we're concerned, not being able to access a file just means the file is missing. Plus, AWS explicitly goes out of its way to return a 403 if the file is missing and the requester doesn't have permission to list the bucket. Also getting rid of an old hack that Eelco said was only relevant to an older AWS SDK.
parent 1969f357b7
commit 3105679226

1 changed file with 2 additions and 2 deletions
@@ -241,8 +241,8 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
             auto & error = res.GetError();
             if (error.GetErrorType() == Aws::S3::S3Errors::RESOURCE_NOT_FOUND
                 || error.GetErrorType() == Aws::S3::S3Errors::NO_SUCH_KEY
-                || (error.GetErrorType() == Aws::S3::S3Errors::UNKNOWN // FIXME
-                    && error.GetMessage().find("404") != std::string::npos))
+                // If bucket listing is disabled, 404s turn into 403s
+                || error.GetErrorType() == Aws::S3::S3Errors::ACCESS_DENIED)
                 return false;
             throw Error(format("AWS error fetching '%s': %s") % path % error.GetMessage());
         }
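For context, the changed condition lives in the S3 binary cache store's object-existence check, which issues a HeadObject request and decides from the error type whether the object is simply missing. The sketch below shows how such a check could be structured; the client and bucketName members and the Error/format helpers are assumptions standing in for the surrounding Nix code, not an exact copy of it.

    // Minimal sketch, assuming the AWS C++ SDK S3 client headers
    // (<aws/s3/S3Client.h>, <aws/s3/model/HeadObjectRequest.h>) and
    // Nix-style Error/format helpers; names are illustrative.
    bool fileExists(const std::string & path)
    {
        auto res = client->HeadObject(
            Aws::S3::Model::HeadObjectRequest()
            .WithBucket(bucketName)
            .WithKey(path));

        if (!res.IsSuccess()) {
            auto & error = res.GetError();
            // Treat "not found" and "access denied" alike: when bucket
            // listing is disabled, a missing object surfaces as a 403.
            if (error.GetErrorType() == Aws::S3::S3Errors::RESOURCE_NOT_FOUND
                || error.GetErrorType() == Aws::S3::S3Errors::NO_SUCH_KEY
                || error.GetErrorType() == Aws::S3::S3Errors::ACCESS_DENIED)
                return false;
            throw Error(format("AWS error fetching '%s': %s") % path % error.GetMessage());
        }

        return true;
    }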