provider/aws: Adding some other simple S3 Bucket Object (Optional) Inputs #3265

Merged
6 commits merged on Oct 12, 2015
70 changes: 63 additions & 7 deletions builtin/providers/aws/resource_aws_s3_bucket_object.go
@@ -27,6 +27,37 @@ func resourceAwsS3BucketObject() *schema.Resource {
ForceNew: true,
},

"cache_control": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},

"content_disposition": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},

"content_encoding": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},

"content_language": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},

"content_type": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
Contributor

Seems like at least this one needs to be computed as well; I'm getting a plan loop with this config:

provider "aws" {
  region = "us-west-2"
}

resource "aws_s3_bucket" "object_bucket" {
    bucket = "tf-object-test-bucket-1234"
}

resource "aws_s3_bucket_object" "object" {
    bucket = "${aws_s3_bucket.object_bucket.bucket}"
    key = "test-key"
    source = "terraform.tfstate"
}
~ aws_s3_bucket_object.object
    content_type: "binary/octet-stream" => ""

Contributor Author

@catsby can a param be both Optional and Computed?
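For context, an attribute can indeed be both `Optional` and `Computed` in `helper/schema`: `Computed` lets the provider store the value the API reports back (here, S3's default of "binary/octet-stream") whenever the configuration leaves the field unset, which is what stops the plan loop shown above. A minimal sketch of the resulting `content_type` schema entry, mirroring the merged diff that follows:

"content_type": &schema.Schema{
	Type:     schema.TypeString,
	Optional: true,
	// Computed: keep the Content-Type that S3 reports back when the
	// configuration does not set one, instead of diffing against "".
	Computed: true,
	ForceNew: true,
},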

Computed: true,
},

"key": &schema.Schema{
Type: schema.TypeString,
Required: true,
@@ -74,16 +105,36 @@ func resourceAwsS3BucketObjectPut(d *schema.ResourceData, meta interface{}) error {
content := v.(string)
body = bytes.NewReader([]byte(content))
} else {

return fmt.Errorf("Must specify \"source\" or \"content\" field")
}
putInput := &s3.PutObjectInput{
Bucket: aws.String(bucket),
Key: aws.String(key),
Body: body,
}

if v, ok := d.GetOk("cache_control"); ok {
putInput.CacheControl = aws.String(v.(string))
}

if v, ok := d.GetOk("content_type"); ok {
putInput.ContentType = aws.String(v.(string))
}

if v, ok := d.GetOk("content_encoding"); ok {
putInput.ContentEncoding = aws.String(v.(string))
}

if v, ok := d.GetOk("content_language"); ok {
putInput.ContentLanguage = aws.String(v.(string))
}

if v, ok := d.GetOk("content_disposition"); ok {
putInput.ContentDisposition = aws.String(v.(string))
}

resp, err := s3conn.PutObject(putInput)
if err != nil {
return fmt.Errorf("Error putting object in S3 bucket (%s): %s", bucket, err)
}
@@ -117,6 +168,12 @@ func resourceAwsS3BucketObjectRead(d *schema.ResourceData, meta interface{}) error {
return err
}

d.Set("cache_control", resp.CacheControl)
d.Set("content_disposition", resp.ContentDisposition)
d.Set("content_encoding", resp.ContentEncoding)
d.Set("content_language", resp.ContentLanguage)
d.Set("content_type", resp.ContentType)

log.Printf("[DEBUG] Reading S3 Bucket Object meta: %s", resp)
return nil
}
@@ -137,4 +194,3 @@ func resourceAwsS3BucketObjectDelete(d *schema.ResourceData, meta interface{}) error {
}
return nil
}

47 changes: 43 additions & 4 deletions builtin/providers/aws/resource_aws_s3_bucket_object_test.go
@@ -55,6 +55,31 @@ func TestAccAWSS3BucketObject_content(t *testing.T) {
})
}

func TestAccAWSS3BucketObject_withContentCharacteristics(t *testing.T) {
// First write some data to the temp file (tf, which along with err is
// assumed to come from the package-level ioutil.TempFile call shared by
// the other acceptance tests in this file) just so it's not 0 bytes.
ioutil.WriteFile(tf.Name(), []byte("{anything will do }"), 0644)
resource.Test(t, resource.TestCase{
PreCheck: func() {
if err != nil {
panic(err)
}
testAccPreCheck(t)
},
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: testAccAWSS3BucketObjectConfig_withContentCharacteristics,
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object"),
resource.TestCheckResourceAttr(
"aws_s3_bucket_object.object", "content_type", "binary/octet-stream"),
),
},
},
})
}

func testAccCheckAWSS3BucketObjectDestroy(s *terraform.State) error {
s3conn := testAccProvider.Meta().(*AWSClient).s3conn

@@ -110,9 +135,24 @@ resource "aws_s3_bucket" "object_bucket" {
bucket = "tf-object-test-bucket-%d"
}
resource "aws_s3_bucket_object" "object" {
bucket = "${aws_s3_bucket.object_bucket.bucket}"
key = "test-key"
source = "%s"
bucket = "${aws_s3_bucket.object_bucket.bucket}"
key = "test-key"
source = "%s"
content_type = "binary/octet-stream"
}
`, randomBucket, tf.Name())

var testAccAWSS3BucketObjectConfig_withContentCharacteristics = fmt.Sprintf(`
resource "aws_s3_bucket" "object_bucket_2" {
bucket = "tf-object-test-bucket-%d"
}

resource "aws_s3_bucket_object" "object" {
bucket = "${aws_s3_bucket.object_bucket_2.bucket}"
key = "test-key"
source = "%s"
content_language = "en"
content_type = "binary/octet-stream"
}
`, randomBucket, tf.Name())

@@ -126,4 +166,3 @@
content = "some_bucket_content"
}
`, randomBucket)

@@ -28,8 +28,13 @@ The following arguments are supported:

* `bucket` - (Required) The name of the bucket to put the file in.
* `key` - (Required) The name of the object once it is in the bucket.
* `source` - (Required unless `content` given) The path to the source file being uploaded to the bucket.
* `content` - (Required unless `source` given) The literal content being uploaded to the bucket.
* `cache_control` - (Optional) Specifies caching behavior along the request/reply chain. Read [w3c cache_control](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9) for further details.
* `content_disposition` - (Optional) Specifies presentational information for the object. Read [w3c content_disposition](http://www.w3.org/Protocols/rfc2616/rfc2616-sec19.html#sec19.5.1) for further information.
* `content_encoding` - (Optional) Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media type referenced by the Content-Type header field. Read [w3c content encoding](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11) for further information.
* `content_language` - (Optional) The language the content is in, e.g. en-US or en-GB.
* `content_type` - (Optional) A standard MIME type describing the format of the object data, e.g. application/octet-stream. All valid MIME types are accepted for this argument.

Either `source` or `content` must be provided to specify the bucket content.
These two arguments are mutually exclusive.
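As a hypothetical illustration (the bucket, key, and file names below are made up), an object that sets several of these optional arguments could be declared as:

resource "aws_s3_bucket_object" "example" {
    bucket           = "my-example-bucket"
    key              = "index.html"
    source           = "index.html"
    content_type     = "text/html"
    content_language = "en-US"
    cache_control    = "max-age=3600"
}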