From be6e97626a33fe43617ff2ef8af4ed1671f37780 Mon Sep 17 00:00:00 2001
From: The Magician
Date: Wed, 2 Nov 2022 10:36:53 -0700
Subject: [PATCH] Add PubSub action to DLP Job Trigger (#6757) (#4832)

Signed-off-by: Modular Magician
Signed-off-by: Modular Magician
---
 .changelog/6757.txt                           |  3 +
 ...source_data_loss_prevention_job_trigger.go | 67 ++++++++++++++++++-
 ...e_data_loss_prevention_job_trigger_test.go | 57 ++++++++++++++++
 ..._loss_prevention_job_trigger.html.markdown | 13 +++-
 4 files changed, 138 insertions(+), 2 deletions(-)
 create mode 100644 .changelog/6757.txt

diff --git a/.changelog/6757.txt b/.changelog/6757.txt
new file mode 100644
index 0000000000..d76154c9f2
--- /dev/null
+++ b/.changelog/6757.txt
@@ -0,0 +1,3 @@
+```release-note:enhancement
+dlp: added pubsub action to `google_data_loss_prevention_job_trigger`
+```
diff --git a/google-beta/resource_data_loss_prevention_job_trigger.go b/google-beta/resource_data_loss_prevention_job_trigger.go
index a738ac2779..a74336c61e 100644
--- a/google-beta/resource_data_loss_prevention_job_trigger.go
+++ b/google-beta/resource_data_loss_prevention_job_trigger.go
@@ -102,9 +102,25 @@ A duration in seconds with up to nine fractional digits, terminated by 's'. Exam
 				Description: `A task to execute on the completion of a job.`,
 				Elem: &schema.Resource{
 					Schema: map[string]*schema.Schema{
+						"pub_sub": {
+							Type:        schema.TypeList,
+							Optional:    true,
+							Description: `Publish a message into a given Pub/Sub topic when the job completes.`,
+							MaxItems:    1,
+							Elem: &schema.Resource{
+								Schema: map[string]*schema.Schema{
+									"topic": {
+										Type:        schema.TypeString,
+										Required:    true,
+										Description: `Cloud Pub/Sub topic to send notifications to.`,
+									},
+								},
+							},
+							ExactlyOneOf: []string{},
+						},
 						"save_findings": {
 							Type:        schema.TypeList,
-							Required:    true,
+							Optional:    true,
 							Description: `Schedule for triggered jobs`,
 							MaxItems:    1,
 							Elem: &schema.Resource{
@@ -160,6 +176,7 @@ Only for use with external storage.
Possible values: ["BASIC_COLUMNS", "GCS_COLU }, }, }, + ExactlyOneOf: []string{}, }, }, }, @@ -1114,6 +1131,7 @@ func flattenDataLossPreventionJobTriggerInspectJobActions(v interface{}, d *sche } transformed = append(transformed, map[string]interface{}{ "save_findings": flattenDataLossPreventionJobTriggerInspectJobActionsSaveFindings(original["saveFindings"], d, config), + "pub_sub": flattenDataLossPreventionJobTriggerInspectJobActionsPubSub(original["pubSub"], d, config), }) } return transformed @@ -1179,6 +1197,23 @@ func flattenDataLossPreventionJobTriggerInspectJobActionsSaveFindingsOutputConfi return v } +func flattenDataLossPreventionJobTriggerInspectJobActionsPubSub(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["topic"] = + flattenDataLossPreventionJobTriggerInspectJobActionsPubSubTopic(original["topic"], d, config) + return []interface{}{transformed} +} +func flattenDataLossPreventionJobTriggerInspectJobActionsPubSubTopic(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + func expandDataLossPreventionJobTriggerDescription(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { return v, nil } @@ -1701,6 +1736,13 @@ func expandDataLossPreventionJobTriggerInspectJobActions(v interface{}, d Terraf transformed["saveFindings"] = transformedSaveFindings } + transformedPubSub, err := expandDataLossPreventionJobTriggerInspectJobActionsPubSub(original["pub_sub"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedPubSub); val.IsValid() && !isEmptyValue(val) { + transformed["pubSub"] = transformedPubSub + } + req = append(req, transformed) } return req, nil @@ -1800,6 +1842,29 @@ func expandDataLossPreventionJobTriggerInspectJobActionsSaveFindingsOutputConfig return v, nil } +func expandDataLossPreventionJobTriggerInspectJobActionsPubSub(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedTopic, err := expandDataLossPreventionJobTriggerInspectJobActionsPubSubTopic(original["topic"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedTopic); val.IsValid() && !isEmptyValue(val) { + transformed["topic"] = transformedTopic + } + + return transformed, nil +} + +func expandDataLossPreventionJobTriggerInspectJobActionsPubSubTopic(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + func resourceDataLossPreventionJobTriggerEncoder(d *schema.ResourceData, meta interface{}, obj map[string]interface{}) (map[string]interface{}, error) { newObj := make(map[string]interface{}) newObj["jobTrigger"] = obj diff --git a/google-beta/resource_data_loss_prevention_job_trigger_test.go b/google-beta/resource_data_loss_prevention_job_trigger_test.go index 7b057409ff..b38fa383a2 100644 --- a/google-beta/resource_data_loss_prevention_job_trigger_test.go +++ b/google-beta/resource_data_loss_prevention_job_trigger_test.go @@ -41,6 +41,31 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerUpdateExample(t *testing.T }) } +func TestAccDataLossPreventionJobTrigger_dlpJobTriggerPubsub(t *testing.T) { + t.Parallel() 
+ + context := map[string]interface{}{ + "project": getTestProjectFromEnv(), + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionJobTrigger_publishToPubSub(context), + }, + { + ResourceName: "google_data_loss_prevention_job_trigger.pubsub", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"parent"}, + }, + }, + }) +} + func testAccDataLossPreventionJobTrigger_dlpJobTriggerBasic(context map[string]interface{}) string { return Nprintf(` resource "google_data_loss_prevention_job_trigger" "basic" { @@ -114,3 +139,35 @@ resource "google_data_loss_prevention_job_trigger" "basic" { } `, context) } + +func testAccDataLossPreventionJobTrigger_publishToPubSub(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_job_trigger" "pubsub" { + parent = "projects/%{project}" + description = "Starting description" + display_name = "display" + + triggers { + schedule { + recurrence_period_duration = "86400s" + } + } + + inspect_job { + inspect_template_name = "fake" + actions { + pub_sub { + topic = "projects/%{project}/topics/bar" + } + } + storage_config { + cloud_storage_options { + file_set { + url = "gs://mybucket/directory/" + } + } + } + } +} +`, context) +} diff --git a/website/docs/r/data_loss_prevention_job_trigger.html.markdown b/website/docs/r/data_loss_prevention_job_trigger.html.markdown index 97d467c6a9..3c963faef8 100644 --- a/website/docs/r/data_loss_prevention_job_trigger.html.markdown +++ b/website/docs/r/data_loss_prevention_job_trigger.html.markdown @@ -321,10 +321,15 @@ The following arguments are supported: The `actions` block supports: * `save_findings` - - (Required) + (Optional) Schedule for triggered jobs Structure is [documented below](#nested_save_findings). +* `pub_sub` - + (Optional) + Publish a message into a given Pub/Sub topic when the job completes. + Structure is [documented below](#nested_pub_sub). + The `save_findings` block supports: @@ -368,6 +373,12 @@ The following arguments are supported: Name of the table. If is not set a new one will be generated for you with the following format: `dlp_googleapis_yyyy_mm_dd_[dlp_job_id]`. Pacific timezone will be used for generating the date details. +The `pub_sub` block supports: + +* `topic` - + (Required) + Cloud Pub/Sub topic to send notifications to. + ## Attributes Reference In addition to the arguments listed above, the following computed attributes are exported:
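---

A quick usage sketch for reviewers: the configuration below mirrors the acceptance test in this patch, but wires the new `pub_sub` action to a managed topic resource instead of a hard-coded topic string. The project, bucket, and resource names are placeholders, not part of the change.

```hcl
# Illustrative only; names are hypothetical. The pub_sub block is the
# new surface added by this patch.
resource "google_pubsub_topic" "dlp_notifications" {
  name = "dlp-job-notifications"
}

resource "google_data_loss_prevention_job_trigger" "pubsub_example" {
  parent       = "projects/my-project"
  description  = "Publishes to Pub/Sub when each triggered job completes"
  display_name = "pubsub-example"

  triggers {
    schedule {
      recurrence_period_duration = "86400s"
    }
  }

  inspect_job {
    inspect_template_name = "fake"

    actions {
      pub_sub {
        # topic expects the full resource name, projects/{project}/topics/{name},
        # which the topic's id attribute resolves to.
        topic = google_pubsub_topic.dlp_notifications.id
      }
    }

    storage_config {
      cloud_storage_options {
        file_set {
          url = "gs://my-bucket/directory/"
        }
      }
    }
  }
}
```

Note that the patch also relaxes `save_findings` from `Required` to `Optional`, so a trigger whose only action is `pub_sub`, as above, now validates.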