Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

backend/s3: Configure AWS Client MaxRetries and provide enhanced S3 NoSuchBucket error message #19951

Merged
merged 1 commit into from
Jan 11, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions backend/remote-state/s3/backend.go
Original file line number Diff line number Diff line change
Expand Up @@ -219,6 +219,13 @@ func New() backend.Backend {
Description: "Force s3 to use path style api.",
Default: false,
},

"max_retries": {
Type: schema.TypeInt,
Optional: true,
Description: "The maximum number of times an AWS API request is retried on retryable failure.",
Default: 5,
},
},
}

Expand Down Expand Up @@ -285,6 +292,7 @@ func (b *Backend) configure(ctx context.Context) error {
SkipRequestingAccountId: data.Get("skip_requesting_account_id").(bool),
SkipMetadataApiCheck: data.Get("skip_metadata_api_check").(bool),
S3ForcePathStyle: data.Get("force_path_style").(bool),
MaxRetries: data.Get("max_retries").(int),
}

client, err := cfg.Client()
Expand Down
4 changes: 4 additions & 0 deletions backend/remote-state/s3/backend_state.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import (
"strings"

"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/service/s3"

"github.com/hashicorp/terraform/backend"
Expand All @@ -29,6 +30,9 @@ func (b *Backend) Workspaces() ([]string, error) {

resp, err := b.s3Client.ListObjects(params)
if err != nil {
if awsErr, ok := err.(awserr.Error); ok && awsErr.Code() == s3.ErrCodeNoSuchBucket {
return nil, fmt.Errorf(errS3NoSuchBucket, err)
}
return nil, err
}

Expand Down
98 changes: 42 additions & 56 deletions backend/remote-state/s3/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -98,30 +98,21 @@ func (c *RemoteClient) get() (*remote.Payload, error) {
var output *s3.GetObjectOutput
var err error

// we immediately retry on an internal error, as those are usually transient
maxRetries := 2
for retryCount := 0; ; retryCount++ {
output, err = c.s3Client.GetObject(&s3.GetObjectInput{
Bucket: &c.bucketName,
Key: &c.path,
})
output, err = c.s3Client.GetObject(&s3.GetObjectInput{
Bucket: &c.bucketName,
Key: &c.path,
})

if err != nil {
if awserr, ok := err.(awserr.Error); ok {
switch awserr.Code() {
case s3.ErrCodeNoSuchKey:
return nil, nil
case s3ErrCodeInternalError:
if retryCount > maxRetries {
return nil, err
}
log.Println("[WARN] s3 internal error, retrying...")
continue
}
if err != nil {
if awserr, ok := err.(awserr.Error); ok {
switch awserr.Code() {
case s3.ErrCodeNoSuchBucket:
return nil, fmt.Errorf(errS3NoSuchBucket, err)
case s3.ErrCodeNoSuchKey:
return nil, nil
}
return nil, err
}
break
return nil, err
}

defer output.Body.Close()
Expand Down Expand Up @@ -149,46 +140,32 @@ func (c *RemoteClient) Put(data []byte) error {
contentType := "application/json"
contentLength := int64(len(data))

// we immediately retry on an internal error, as those are usually transient
maxRetries := 2
for retryCount := 0; ; retryCount++ {
i := &s3.PutObjectInput{
ContentType: &contentType,
ContentLength: &contentLength,
Body: bytes.NewReader(data),
Bucket: &c.bucketName,
Key: &c.path,
}
i := &s3.PutObjectInput{
ContentType: &contentType,
ContentLength: &contentLength,
Body: bytes.NewReader(data),
Bucket: &c.bucketName,
Key: &c.path,
}

if c.serverSideEncryption {
if c.kmsKeyID != "" {
i.SSEKMSKeyId = &c.kmsKeyID
i.ServerSideEncryption = aws.String("aws:kms")
} else {
i.ServerSideEncryption = aws.String("AES256")
}
if c.serverSideEncryption {
if c.kmsKeyID != "" {
i.SSEKMSKeyId = &c.kmsKeyID
i.ServerSideEncryption = aws.String("aws:kms")
} else {
i.ServerSideEncryption = aws.String("AES256")
}
}

if c.acl != "" {
i.ACL = aws.String(c.acl)
}
if c.acl != "" {
i.ACL = aws.String(c.acl)
}

log.Printf("[DEBUG] Uploading remote state to S3: %#v", i)
log.Printf("[DEBUG] Uploading remote state to S3: %#v", i)

_, err := c.s3Client.PutObject(i)
if err != nil {
if awserr, ok := err.(awserr.Error); ok {
if awserr.Code() == s3ErrCodeInternalError {
if retryCount > maxRetries {
return fmt.Errorf("failed to upload state: %s", err)
}
log.Println("[WARN] s3 internal error, retrying...")
continue
}
}
return fmt.Errorf("failed to upload state: %s", err)
}
break
_, err := c.s3Client.PutObject(i)
if err != nil {
return fmt.Errorf("failed to upload state: %s", err)
}

sum := md5.Sum(data)
Expand Down Expand Up @@ -414,3 +391,12 @@ persists, and neither S3 nor DynamoDB are experiencing an outage, you may need
to manually verify the remote state and update the Digest value stored in the
DynamoDB table to the following value: %x
`

// errS3NoSuchBucket is the user-facing error format used when an S3 API
// call fails with s3.ErrCodeNoSuchBucket (see the Workspaces and
// RemoteClient.get call sites, which pass it to fmt.Errorf). The %s verb
// is filled with the underlying AWS SDK error.
const errS3NoSuchBucket = `S3 bucket does not exist.

The referenced S3 bucket must have been previously created. If the S3 bucket
was created within the last minute, please wait for a minute or two and try
again.

Error: %s
`
1 change: 1 addition & 0 deletions website/docs/backends/types/s3.html.md
Original file line number Diff line number Diff line change
Expand Up @@ -180,6 +180,7 @@ The following configuration options or environment variables are supported:
* `skip_region_validation` - (Optional) Skip validation of provided region name.
* `skip_requesting_account_id` - (Optional) Skip requesting the account ID.
* `skip_metadata_api_check` - (Optional) Skip the AWS Metadata API check.
* `max_retries` - (Optional) The maximum number of times an AWS API request is retried on retryable failure. Defaults to 5.

## Multi-account AWS Architecture

Expand Down