Add two outputs for DLP job trigger: outputting to SCC and Data Catalog #13562

Merged
3 changes: 3 additions & 0 deletions .changelog/6855.txt
@@ -0,0 +1,3 @@
```release-note:enhancement
dlp: added `publish_findings_to_cloud_data_catalog` and `publish_summary_to_cscc` to `google_data_loss_prevention_job_trigger` resource
```
88 changes: 85 additions & 3 deletions google/resource_data_loss_prevention_job_trigger.go
@@ -118,10 +118,30 @@ A duration in seconds with up to nine fractional digits, terminated by 's'. Exam
},
ExactlyOneOf: []string{},
},
"publish_findings_to_cloud_data_catalog": {
Type: schema.TypeList,
Optional: true,
Description: `Publish findings of a DlpJob to Data Catalog.`,
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{},
},
ExactlyOneOf: []string{},
},
"publish_summary_to_cscc": {
Type: schema.TypeList,
Optional: true,
Description: `Publish the result summary of a DlpJob to the Cloud Security Command Center.`,
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{},
},
ExactlyOneOf: []string{},
},
"save_findings": {
Type: schema.TypeList,
Optional: true,
Description: `Schedule for triggered jobs`,
Description: `If set, the detailed findings will be persisted to the specified OutputStorageConfig. Only a single instance of this action can be specified. Compatible with: Inspect, Risk`,
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
@@ -1235,8 +1255,10 @@ func flattenDataLossPreventionJobTriggerInspectJobActions(v interface{}, d *sche
continue
}
transformed = append(transformed, map[string]interface{}{
"save_findings": flattenDataLossPreventionJobTriggerInspectJobActionsSaveFindings(original["saveFindings"], d, config),
"pub_sub": flattenDataLossPreventionJobTriggerInspectJobActionsPubSub(original["pubSub"], d, config),
"save_findings": flattenDataLossPreventionJobTriggerInspectJobActionsSaveFindings(original["saveFindings"], d, config),
"pub_sub": flattenDataLossPreventionJobTriggerInspectJobActionsPubSub(original["pubSub"], d, config),
"publish_summary_to_cscc": flattenDataLossPreventionJobTriggerInspectJobActionsPublishSummaryToCscc(original["publishSummaryToCscc"], d, config),
"publish_findings_to_cloud_data_catalog": flattenDataLossPreventionJobTriggerInspectJobActionsPublishFindingsToCloudDataCatalog(original["publishFindingsToCloudDataCatalog"], d, config),
})
}
return transformed
@@ -1319,6 +1341,22 @@ func flattenDataLossPreventionJobTriggerInspectJobActionsPubSubTopic(v interface
return v
}

func flattenDataLossPreventionJobTriggerInspectJobActionsPublishSummaryToCscc(v interface{}, d *schema.ResourceData, config *Config) interface{} {
if v == nil {
return nil
}
transformed := make(map[string]interface{})
return []interface{}{transformed}
}

func flattenDataLossPreventionJobTriggerInspectJobActionsPublishFindingsToCloudDataCatalog(v interface{}, d *schema.ResourceData, config *Config) interface{} {
if v == nil {
return nil
}
transformed := make(map[string]interface{})
return []interface{}{transformed}
}

func expandDataLossPreventionJobTriggerDescription(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
return v, nil
}
@@ -1914,6 +1952,20 @@ func expandDataLossPreventionJobTriggerInspectJobActions(v interface{}, d Terraf
transformed["pubSub"] = transformedPubSub
}

transformedPublishSummaryToCscc, err := expandDataLossPreventionJobTriggerInspectJobActionsPublishSummaryToCscc(original["publish_summary_to_cscc"], d, config)
if err != nil {
return nil, err
} else {
transformed["publishSummaryToCscc"] = transformedPublishSummaryToCscc
}

transformedPublishFindingsToCloudDataCatalog, err := expandDataLossPreventionJobTriggerInspectJobActionsPublishFindingsToCloudDataCatalog(original["publish_findings_to_cloud_data_catalog"], d, config)
if err != nil {
return nil, err
} else {
transformed["publishFindingsToCloudDataCatalog"] = transformedPublishFindingsToCloudDataCatalog
}

req = append(req, transformed)
}
return req, nil
@@ -2036,6 +2088,36 @@ func expandDataLossPreventionJobTriggerInspectJobActionsPubSubTopic(v interface{
return v, nil
}

func expandDataLossPreventionJobTriggerInspectJobActionsPublishSummaryToCscc(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
l := v.([]interface{})
if len(l) == 0 {
return nil, nil
}

if l[0] == nil {
transformed := make(map[string]interface{})
return transformed, nil
}
transformed := make(map[string]interface{})

return transformed, nil
}

func expandDataLossPreventionJobTriggerInspectJobActionsPublishFindingsToCloudDataCatalog(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
l := v.([]interface{})
if len(l) == 0 {
return nil, nil
}

if l[0] == nil {
transformed := make(map[string]interface{})
return transformed, nil
}
transformed := make(map[string]interface{})

return transformed, nil
}

func resourceDataLossPreventionJobTriggerEncoder(d *schema.ResourceData, meta interface{}, obj map[string]interface{}) (map[string]interface{}, error) {
newObj := make(map[string]interface{})
newObj["jobTrigger"] = obj
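Both new actions are empty messages in the DLP API, so the expanders and flatteners in this file encode only presence: a configured block becomes an empty JSON object in the request, and any non-nil value in the API response becomes a single empty element in state. A minimal standalone sketch of that round-trip, with illustrative function names that are not part of the provider:

```go
package main

import "fmt"

// expandEmptyBlock mirrors the expanders above: a configured (possibly
// empty) block becomes an empty object, telling the API to enable the
// action; an absent block is omitted from the request entirely.
func expandEmptyBlock(v []interface{}) map[string]interface{} {
	if len(v) == 0 {
		return nil
	}
	return map[string]interface{}{}
}

// flattenEmptyBlock mirrors the flatteners above: any non-nil API value
// means the action is enabled, represented in state as a one-element list.
func flattenEmptyBlock(v interface{}) []interface{} {
	if v == nil {
		return nil
	}
	return []interface{}{map[string]interface{}{}}
}

func main() {
	fmt.Println(expandEmptyBlock([]interface{}{nil}))        // map[]
	fmt.Println(flattenEmptyBlock(map[string]interface{}{})) // [map[]]
}
```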
122 changes: 122 additions & 0 deletions google/resource_data_loss_prevention_job_trigger_generated_test.go
@@ -222,6 +222,128 @@ resource "google_data_loss_prevention_job_trigger" "bigquery_row_limit_percentag
`, context)
}

func TestAccDataLossPreventionJobTrigger_dlpJobTriggerDataCatalogOutputExample(t *testing.T) {
t.Parallel()

context := map[string]interface{}{
"project": getTestProjectFromEnv(),
"random_suffix": randString(t, 10),
}

vcrTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerDataCatalogOutputExample(context),
},
{
ResourceName: "google_data_loss_prevention_job_trigger.data_catalog_output",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"parent"},
},
},
})
}

func testAccDataLossPreventionJobTrigger_dlpJobTriggerDataCatalogOutputExample(context map[string]interface{}) string {
return Nprintf(`
resource "google_data_loss_prevention_job_trigger" "data_catalog_output" {
parent = "projects/%{project}"
description = "Description"
display_name = "Displayname"

triggers {
schedule {
recurrence_period_duration = "86400s"
}
}

inspect_job {
inspect_template_name = "fake"
actions {
publish_findings_to_cloud_data_catalog {
}
}
storage_config {
big_query_options {
table_reference {
project_id = "project"
dataset_id = "dataset"
table_id = "table_to_scan"
}
rows_limit_percent = 50
sample_method = "RANDOM_START"
}
}
}
}
`, context)
}

func TestAccDataLossPreventionJobTrigger_dlpJobTriggerSccOutputExample(t *testing.T) {
t.Parallel()

context := map[string]interface{}{
"project": getTestProjectFromEnv(),
"random_suffix": randString(t, 10),
}

vcrTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerSccOutputExample(context),
},
{
ResourceName: "google_data_loss_prevention_job_trigger.scc_output",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"parent"},
},
},
})
}

func testAccDataLossPreventionJobTrigger_dlpJobTriggerSccOutputExample(context map[string]interface{}) string {
return Nprintf(`
resource "google_data_loss_prevention_job_trigger" "scc_output" {
parent = "projects/%{project}"
description = "Description"
display_name = "Displayname"

triggers {
schedule {
recurrence_period_duration = "86400s"
}
}

inspect_job {
inspect_template_name = "fake"
actions {
publish_summary_to_cscc {
}
}
storage_config {
big_query_options {
table_reference {
project_id = "project"
dataset_id = "dataset"
table_id = "table_to_scan"
}
rows_limit_percent = 50
sample_method = "RANDOM_START"
}
}
}
}
`, context)
}

func testAccCheckDataLossPreventionJobTriggerDestroyProducer(t *testing.T) func(s *terraform.State) error {
return func(s *terraform.State) error {
for name, rs := range s.RootModule().Resources {
10 changes: 9 additions & 1 deletion website/docs/r/data_loss_prevention_job_trigger.html.markdown
@@ -437,14 +437,22 @@ The following arguments are supported:

* `save_findings` -
(Optional)
Schedule for triggered jobs
If set, the detailed findings will be persisted to the specified OutputStorageConfig. Only a single instance of this action can be specified. Compatible with: Inspect, Risk
Structure is [documented below](#nested_save_findings).

* `pub_sub` -
(Optional)
Publish a message into a given Pub/Sub topic when the job completes.
Structure is [documented below](#nested_pub_sub).

* `publish_summary_to_cscc` -
(Optional)
Publish the result summary of a DlpJob to the Cloud Security Command Center.

* `publish_findings_to_cloud_data_catalog` -
(Optional)
Publish findings of a DlpJob to Data Catalog.
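
Neither block takes any arguments; declaring an empty block enables the corresponding action. A minimal sketch of their use inside `inspect_job` (the values mirror the tests in this PR; whether both actions may be combined on a single job is governed by the DLP API and not verified here):

```hcl
inspect_job {
  inspect_template_name = "fake"

  actions {
    publish_summary_to_cscc {}
  }

  actions {
    publish_findings_to_cloud_data_catalog {}
  }

  storage_config {
    big_query_options {
      table_reference {
        project_id = "project"
        dataset_id = "dataset"
        table_id   = "table_to_scan"
      }
    }
  }
}
```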


<a name="nested_save_findings"></a>The `save_findings` block supports:

Expand Down