diff --git a/.changelog/23193.txt b/.changelog/23193.txt
new file mode 100644
index 00000000000..262fa0860a4
--- /dev/null
+++ b/.changelog/23193.txt
@@ -0,0 +1,3 @@
+```release-note:new-data-source
+aws_backup_framework
+```
\ No newline at end of file
diff --git a/internal/provider/provider.go b/internal/provider/provider.go
index db971b2ca64..4279331c73c 100644
--- a/internal/provider/provider.go
+++ b/internal/provider/provider.go
@@ -417,6 +417,7 @@ func Provider() *schema.Provider {
 			"aws_autoscaling_groups":    autoscaling.DataSourceGroups(),
 			"aws_launch_configuration": autoscaling.DataSourceLaunchConfiguration(),
 
+			"aws_backup_framework":   backup.DataSourceFramework(),
 			"aws_backup_plan":        backup.DataSourcePlan(),
 			"aws_backup_report_plan": backup.DataSourceReportPlan(),
 			"aws_backup_selection":   backup.DataSourceSelection(),
diff --git a/internal/service/backup/framework_data_source.go b/internal/service/backup/framework_data_source.go
new file mode 100644
index 00000000000..f8c91322670
--- /dev/null
+++ b/internal/service/backup/framework_data_source.go
@@ -0,0 +1,139 @@
+package backup
+
+import (
+	"fmt"
+	"time"
+
+	"github.com/aws/aws-sdk-go/aws"
+	"github.com/aws/aws-sdk-go/service/backup"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+	"github.com/hashicorp/terraform-provider-aws/internal/conns"
+	tftags "github.com/hashicorp/terraform-provider-aws/internal/tags"
+)
+
+func DataSourceFramework() *schema.Resource {
+	return &schema.Resource{
+		Read: dataSourceFrameworkRead,
+
+		Schema: map[string]*schema.Schema{
+			"arn": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+			"control": {
+				Type:     schema.TypeSet,
+				Computed: true,
+				Elem: &schema.Resource{
+					Schema: map[string]*schema.Schema{
+						"input_parameter": {
+							Type:     schema.TypeSet,
+							Computed: true,
+							Elem: &schema.Resource{
+								Schema: map[string]*schema.Schema{
+									"name": {
+										Type:     schema.TypeString,
+										Computed: true,
+									},
+									"value": {
+										Type:     schema.TypeString,
+										Computed: true,
+									},
+								},
+							},
+						},
+						"name": {
+							Type:     schema.TypeString,
+							Computed: true,
+						},
+						"scope": {
+							Type:     schema.TypeList,
+							Computed: true,
+							Elem: &schema.Resource{
+								Schema: map[string]*schema.Schema{
+									"compliance_resource_ids": {
+										Type:     schema.TypeSet,
+										Computed: true,
+										Elem: &schema.Schema{
+											Type: schema.TypeString,
+										},
+									},
+									"compliance_resource_types": {
+										Type:     schema.TypeSet,
+										Computed: true,
+										Elem: &schema.Schema{
+											Type: schema.TypeString,
+										},
+									},
+									"tags": tftags.TagsSchemaComputed(),
+								},
+							},
+						},
+					},
+				},
+			},
+			"creation_time": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+			"deployment_status": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+			"description": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+			"name": {
+				Type:     schema.TypeString,
+				Required: true,
+			},
+			"status": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+			"tags": tftags.TagsSchemaComputed(),
+		},
+	}
+}
+
+func dataSourceFrameworkRead(d *schema.ResourceData, meta interface{}) error {
+	conn := meta.(*conns.AWSClient).BackupConn
+	ignoreTagsConfig := meta.(*conns.AWSClient).IgnoreTagsConfig
+
+	name := d.Get("name").(string)
+
+	resp, err := conn.DescribeFramework(&backup.DescribeFrameworkInput{
+		FrameworkName: aws.String(name),
+	})
+	if err != nil {
+		return fmt.Errorf("error getting Backup Framework: %w", err)
+	}
+
+	d.SetId(aws.StringValue(resp.FrameworkName))
+
+	d.Set("arn", resp.FrameworkArn)
+	d.Set("deployment_status", resp.DeploymentStatus)
+	d.Set("description", resp.FrameworkDescription)
+	d.Set("name", resp.FrameworkName)
+	d.Set("status", resp.FrameworkStatus)
+
+	if err := d.Set("creation_time", resp.CreationTime.Format(time.RFC3339)); err != nil {
+		return fmt.Errorf("error setting creation_time: %w", err)
+	}
+
+	if err := d.Set("control", flattenFrameworkControls(resp.FrameworkControls)); err != nil {
+		return fmt.Errorf("error setting control: %w", err)
+	}
+
+	tags, err := ListTags(conn, aws.StringValue(resp.FrameworkArn))
+
+	if err != nil {
+		return fmt.Errorf("error listing tags for Backup Framework (%s): %w", d.Id(), err)
+	}
+
+	if err := d.Set("tags", tags.IgnoreAWS().IgnoreConfig(ignoreTagsConfig).Map()); err != nil {
+		return fmt.Errorf("error setting tags: %w", err)
+	}
+
+	return nil
+}
diff --git a/internal/service/backup/framework_data_source_test.go b/internal/service/backup/framework_data_source_test.go
new file mode 100644
index 00000000000..31fdeccca86
--- /dev/null
+++ b/internal/service/backup/framework_data_source_test.go
@@ -0,0 +1,215 @@
+package backup_test
+
+import (
+	"fmt"
+	"regexp"
+	"testing"
+
+	"github.com/aws/aws-sdk-go/service/backup"
+	sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
+	"github.com/hashicorp/terraform-provider-aws/internal/acctest"
+)
+
+func TestAccBackupFrameworkDataSource_basic(t *testing.T) {
+	datasourceName := "data.aws_backup_framework.test"
+	resourceName := "aws_backup_framework.test"
+	rName := fmt.Sprintf("tf_acc_test_%s", sdkacctest.RandString(7))
+
+	resource.ParallelTest(t, resource.TestCase{
+		PreCheck:   func() { acctest.PreCheck(t); testAccFrameworkPreCheck(t) },
+		ErrorCheck: acctest.ErrorCheck(t, backup.EndpointsID),
+		Providers:  acctest.Providers,
+		Steps: []resource.TestStep{
+			{
+				Config:      testAccFrameworkDataSourceConfig_nonExistent,
+				ExpectError: regexp.MustCompile(`error getting Backup Framework`),
+			},
+			{
+				Config: testAccFrameworkDataSourceConfig_basic(rName),
+				Check: resource.ComposeTestCheckFunc(
+					resource.TestCheckResourceAttrPair(datasourceName, "arn", resourceName, "arn"),
+					resource.TestCheckResourceAttrPair(datasourceName, "control.#", resourceName, "control.#"),
+					resource.TestCheckTypeSetElemNestedAttrs(datasourceName, "control.*", map[string]string{
+						"name":                    "BACKUP_RECOVERY_POINT_MINIMUM_RETENTION_CHECK",
+						"input_parameter.#":       "1",
+						"input_parameter.0.name":  "requiredRetentionDays",
+						"input_parameter.0.value": "35",
+					}),
+					resource.TestCheckTypeSetElemNestedAttrs(datasourceName, "control.*", map[string]string{
+						"name":              "BACKUP_PLAN_MIN_FREQUENCY_AND_MIN_RETENTION_CHECK",
+						"input_parameter.#": "3",
+					}),
+					resource.TestCheckTypeSetElemNestedAttrs(datasourceName, "control.*", map[string]string{
+						"name": "BACKUP_RECOVERY_POINT_ENCRYPTED",
+					}),
+					resource.TestCheckTypeSetElemNestedAttrs(datasourceName, "control.*", map[string]string{
+						"name":                                 "BACKUP_RESOURCES_PROTECTED_BY_BACKUP_PLAN",
+						"scope.#":                              "1",
+						"scope.0.compliance_resource_ids.#":    "1",
+						"scope.0.compliance_resource_types.#": "1",
+						"scope.0.compliance_resource_types.0": "EBS",
+					}),
+					resource.TestCheckTypeSetElemNestedAttrs(datasourceName, "control.*", map[string]string{
+						"name": "BACKUP_RECOVERY_POINT_MANUAL_DELETION_DISABLED",
+					}),
+					resource.TestCheckTypeSetElemAttrPair(datasourceName, "control.*.scope.0.compliance_resource_ids.0", "aws_ebs_volume.test", "id"),
+					resource.TestCheckResourceAttrPair(datasourceName, "creation_time", resourceName, "creation_time"),
+					resource.TestCheckResourceAttrPair(datasourceName, "deployment_status", resourceName, "deployment_status"),
"deployment_status", resourceName, "deployment_status"), + resource.TestCheckResourceAttrPair(datasourceName, "id", resourceName, "id"), + resource.TestCheckResourceAttrPair(datasourceName, "name", resourceName, "name"), + resource.TestCheckResourceAttrPair(datasourceName, "status", resourceName, "status"), + resource.TestCheckResourceAttrPair(datasourceName, "tags.%", resourceName, "tags.%"), + resource.TestCheckResourceAttrPair(datasourceName, "tags.Name", resourceName, "tags.Name"), + ), + }, + }, + }) +} + +func TestAccBackupFrameworkDataSource_controlScopeTag(t *testing.T) { + datasourceName := "data.aws_backup_framework.test" + resourceName := "aws_backup_framework.test" + rName := fmt.Sprintf("tf_acc_test_%s", sdkacctest.RandString(7)) + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t); testAccFrameworkPreCheck(t) }, + ErrorCheck: acctest.ErrorCheck(t, backup.EndpointsID), + Providers: acctest.Providers, + Steps: []resource.TestStep{ + { + Config: testAccFrameworkDataSourceConfig_controlScopeTag(rName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrPair(datasourceName, "arn", resourceName, "arn"), + resource.TestCheckResourceAttrPair(datasourceName, "control.#", resourceName, "control.#"), + resource.TestCheckResourceAttrPair(datasourceName, "control.0.name", resourceName, "control.0.name"), + resource.TestCheckResourceAttrPair(datasourceName, "control.0.scope.#", resourceName, "control.0.scope.#"), + resource.TestCheckResourceAttrPair(datasourceName, "control.0.scope.0.tags.%", resourceName, "control.0.scope.0.tags.%"), + resource.TestCheckResourceAttrPair(datasourceName, "control.0.scope.0.tags.Name", resourceName, "control.0.scope.0.tags.Name"), + resource.TestCheckResourceAttrPair(datasourceName, "creation_time", resourceName, "creation_time"), + resource.TestCheckResourceAttrPair(datasourceName, "deployment_status", resourceName, "deployment_status"), + resource.TestCheckResourceAttrPair(datasourceName, "id", resourceName, "id"), + resource.TestCheckResourceAttrPair(datasourceName, "name", resourceName, "name"), + resource.TestCheckResourceAttrPair(datasourceName, "status", resourceName, "status"), + resource.TestCheckResourceAttrPair(datasourceName, "tags.%", resourceName, "tags.%"), + resource.TestCheckResourceAttrPair(datasourceName, "tags.Name", resourceName, "tags.Name"), + ), + }, + }, + }) +} + +const testAccFrameworkDataSourceConfig_nonExistent = ` +data "aws_backup_framework" "test" { + name = "tf_acc_test_does_not_exist" +} +` + +func testAccFrameworkDataSourceConfig_basic(rName string) string { + return fmt.Sprintf(` +data "aws_availability_zones" "available" { + state = "available" + + filter { + name = "opt-in-status" + values = ["opt-in-not-required"] + } +} + +resource "aws_ebs_volume" "test" { + availability_zone = data.aws_availability_zones.available.names[0] + type = "gp2" + size = 1 +} + +resource "aws_backup_framework" "test" { + name = %[1]q + description = "Example framework" + + control { + name = "BACKUP_RECOVERY_POINT_MINIMUM_RETENTION_CHECK" + + input_parameter { + name = "requiredRetentionDays" + value = "35" + } + } + + control { + name = "BACKUP_PLAN_MIN_FREQUENCY_AND_MIN_RETENTION_CHECK" + + input_parameter { + name = "requiredFrequencyUnit" + value = "hours" + } + + input_parameter { + name = "requiredRetentionDays" + value = "35" + } + + input_parameter { + name = "requiredFrequencyValue" + value = "1" + } + } + + control { + name = "BACKUP_RECOVERY_POINT_ENCRYPTED" + } + + control 
+    name = "BACKUP_RESOURCES_PROTECTED_BY_BACKUP_PLAN"
+
+    scope {
+      compliance_resource_ids = [
+        aws_ebs_volume.test.id
+      ]
+
+      compliance_resource_types = [
+        "EBS"
+      ]
+    }
+  }
+
+  control {
+    name = "BACKUP_RECOVERY_POINT_MANUAL_DELETION_DISABLED"
+  }
+
+  tags = {
+    "Name" = "Test Framework"
+  }
+}
+
+data "aws_backup_framework" "test" {
+  name = aws_backup_framework.test.name
+}
+`, rName)
+}
+
+func testAccFrameworkDataSourceConfig_controlScopeTag(rName string) string {
+	return fmt.Sprintf(`
+resource "aws_backup_framework" "test" {
+  name        = %[1]q
+  description = "Example framework"
+
+  control {
+    name = "BACKUP_RESOURCES_PROTECTED_BY_BACKUP_PLAN"
+
+    scope {
+      tags = {
+        "Name" = "Example"
+      }
+    }
+  }
+
+  tags = {
+    "Name" = "Test Framework"
+  }
+}
+
+data "aws_backup_framework" "test" {
+  name = aws_backup_framework.test.name
+}
+`, rName)
+}
diff --git a/website/docs/d/backup_framework.html.markdown b/website/docs/d/backup_framework.html.markdown
new file mode 100644
index 00000000000..f3ea9ec1b2f
--- /dev/null
+++ b/website/docs/d/backup_framework.html.markdown
@@ -0,0 +1,58 @@
+---
+subcategory: "Backup"
+layout: "aws"
+page_title: "AWS: aws_backup_framework"
+description: |-
+  Provides details about an AWS Backup Framework.
+---
+
+# Data Source: aws_backup_framework
+
+Use this data source to get information on an existing backup framework.
+
+## Example Usage
+
+```terraform
+data "aws_backup_framework" "example" {
+  name = "tf_example_backup_framework_name"
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `name` - (Required) The backup framework name.
+
+## Attributes Reference
+
+In addition to the arguments above, the following attributes are exported:
+
+* `arn` - The ARN of the backup framework.
+* `control` - One or more control blocks that make up the framework. Each control in the list has a name, input parameters, and scope. Detailed below.
+* `creation_time` - The date and time that a framework is created, in Unix format and Coordinated Universal Time (UTC).
+* `deployment_status` - The deployment status of a framework. The statuses are: `CREATE_IN_PROGRESS` | `UPDATE_IN_PROGRESS` | `DELETE_IN_PROGRESS` | `COMPLETED` | `FAILED`.
+* `description` - The description of the framework.
+* `id` - The ID of the framework.
+* `status` - A framework consists of one or more controls. Each control governs a resource, such as backup plans, backup selections, backup vaults, or recovery points. You can also turn AWS Config recording on or off for each resource. The statuses are: `ACTIVE`, `PARTIALLY_ACTIVE`, `INACTIVE`, `UNAVAILABLE`. For more information, refer to the [AWS documentation for Framework Status](https://docs.aws.amazon.com/aws-backup/latest/devguide/API_DescribeFramework.html#Backup-DescribeFramework-response-FrameworkStatus).
+* `tags` - Metadata that helps organize the frameworks you create.
+
+### Control Attributes
+For **control** the following attributes are supported:
+
+* `input_parameter` - One or more input parameter blocks. An example of a control with two parameters is: "backup plan frequency is at least daily and the retention period is at least 1 year". The first parameter is daily. The second parameter is 1 year. Detailed below.
+* `name` - The name of a control.
+* `scope` - The scope of a control. The control scope defines what the control will evaluate. Three examples of control scopes are: a specific backup plan, all backup plans with a specific tag, or all backup plans. Detailed below.
+
+### Input Parameter Attributes
+For **input_parameter** the following attributes are supported:
+
+* `name` - The name of a parameter, for example, BackupPlanFrequency.
+* `value` - The value of a parameter, for example, hourly.
+
+### Scope Attributes
+For **scope** the following attributes are supported:
+
+* `compliance_resource_ids` - The ID of the only AWS resource that you want your control scope to contain.
+* `compliance_resource_types` - Describes whether the control scope includes one or more types of resources, such as EFS or RDS.
+* `tags` - The tag key-value pair applied to those AWS resources that you want to trigger an evaluation for a rule. A maximum of one key-value pair can be provided.
\ No newline at end of file
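As a quick follow-on to the documentation above, here is a minimal sketch of how the exported attributes might be consumed in configuration. It assumes the `data.aws_backup_framework.example` lookup from the docs' Example Usage; the output names below are illustrative only and are not part of this change.

```terraform
# Assumes the lookup shown in the documentation example:
# data "aws_backup_framework" "example" { name = "tf_example_backup_framework_name" }

# Surface the framework's ARN and deployment status.
output "backup_framework_arn" {
  value = data.aws_backup_framework.example.arn
}

output "backup_framework_deployment_status" {
  value = data.aws_backup_framework.example.deployment_status
}

# The control blocks are exported as a set of objects; collect just their names.
output "backup_framework_control_names" {
  value = [for c in data.aws_backup_framework.example.control : c.name]
}
```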