Commit

Add PubSub action to DLP Job Trigger (#6757) (#4832)
Signed-off-by: Modular Magician <[email protected]>
modular-magician authored Nov 2, 2022
1 parent ce9930a commit be6e976
Showing 4 changed files with 138 additions and 2 deletions.
3 changes: 3 additions & 0 deletions .changelog/6757.txt
@@ -0,0 +1,3 @@
```release-note:enhancement
dlp: added pubsub action to `google_data_loss_prevention_job_trigger`
```
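
For orientation, here is a minimal sketch of what the new action looks like in a Terraform configuration. It is modeled on the test configuration added later in this commit; the project, template, topic, and bucket names below are placeholders, not values from the commit.

```hcl
# Minimal sketch of the new pub_sub action (all names are placeholders).
resource "google_data_loss_prevention_job_trigger" "pubsub" {
  parent       = "projects/my-project"
  description  = "Notify a Pub/Sub topic when inspection completes"
  display_name = "pubsub-notify"

  triggers {
    schedule {
      recurrence_period_duration = "86400s"
    }
  }

  inspect_job {
    inspect_template_name = "projects/my-project/inspectTemplates/my-template"

    actions {
      # New in this change: publish a message to the topic when the job completes.
      pub_sub {
        topic = "projects/my-project/topics/dlp-notifications"
      }
    }

    storage_config {
      cloud_storage_options {
        file_set {
          url = "gs://my-bucket/directory/"
        }
      }
    }
  }
}
```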
67 changes: 66 additions & 1 deletion google-beta/resource_data_loss_prevention_job_trigger.go
@@ -102,9 +102,25 @@ A duration in seconds with up to nine fractional digits, terminated by 's'. Exam
Description: `A task to execute on the completion of a job.`,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"pub_sub": {
Type: schema.TypeList,
Optional: true,
Description: `Publish a message into a given Pub/Sub topic when the job completes.`,
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"topic": {
Type: schema.TypeString,
Required: true,
Description: `Cloud Pub/Sub topic to send notifications to.`,
},
},
},
ExactlyOneOf: []string{},
},
"save_findings": {
Type: schema.TypeList,
Required: true,
Optional: true,
Description: `Schedule for triggered jobs`,
MaxItems: 1,
Elem: &schema.Resource{
@@ -160,6 +176,7 @@ Only for use with external storage. Possible values: ["BASIC_COLUMNS", "GCS_COLU
},
},
},
ExactlyOneOf: []string{},
},
},
},
@@ -1114,6 +1131,7 @@ func flattenDataLossPreventionJobTriggerInspectJobActions(v interface{}, d *sche
}
transformed = append(transformed, map[string]interface{}{
"save_findings": flattenDataLossPreventionJobTriggerInspectJobActionsSaveFindings(original["saveFindings"], d, config),
"pub_sub": flattenDataLossPreventionJobTriggerInspectJobActionsPubSub(original["pubSub"], d, config),
})
}
return transformed
@@ -1179,6 +1197,23 @@ func flattenDataLossPreventionJobTriggerInspectJobActionsSaveFindingsOutputConfi
return v
}

func flattenDataLossPreventionJobTriggerInspectJobActionsPubSub(v interface{}, d *schema.ResourceData, config *Config) interface{} {
if v == nil {
return nil
}
original := v.(map[string]interface{})
if len(original) == 0 {
return nil
}
transformed := make(map[string]interface{})
transformed["topic"] =
flattenDataLossPreventionJobTriggerInspectJobActionsPubSubTopic(original["topic"], d, config)
return []interface{}{transformed}
}
func flattenDataLossPreventionJobTriggerInspectJobActionsPubSubTopic(v interface{}, d *schema.ResourceData, config *Config) interface{} {
return v
}

func expandDataLossPreventionJobTriggerDescription(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
return v, nil
}
@@ -1701,6 +1736,13 @@ func expandDataLossPreventionJobTriggerInspectJobActions(v interface{}, d Terraf
transformed["saveFindings"] = transformedSaveFindings
}

transformedPubSub, err := expandDataLossPreventionJobTriggerInspectJobActionsPubSub(original["pub_sub"], d, config)
if err != nil {
return nil, err
} else if val := reflect.ValueOf(transformedPubSub); val.IsValid() && !isEmptyValue(val) {
transformed["pubSub"] = transformedPubSub
}

req = append(req, transformed)
}
return req, nil
@@ -1800,6 +1842,29 @@ func expandDataLossPreventionJobTriggerInspectJobActionsSaveFindingsOutputConfig
return v, nil
}

func expandDataLossPreventionJobTriggerInspectJobActionsPubSub(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
l := v.([]interface{})
if len(l) == 0 || l[0] == nil {
return nil, nil
}
raw := l[0]
original := raw.(map[string]interface{})
transformed := make(map[string]interface{})

transformedTopic, err := expandDataLossPreventionJobTriggerInspectJobActionsPubSubTopic(original["topic"], d, config)
if err != nil {
return nil, err
} else if val := reflect.ValueOf(transformedTopic); val.IsValid() && !isEmptyValue(val) {
transformed["topic"] = transformedTopic
}

return transformed, nil
}

func expandDataLossPreventionJobTriggerInspectJobActionsPubSubTopic(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
return v, nil
}

func resourceDataLossPreventionJobTriggerEncoder(d *schema.ResourceData, meta interface{}, obj map[string]interface{}) (map[string]interface{}, error) {
newObj := make(map[string]interface{})
newObj["jobTrigger"] = obj
57 changes: 57 additions & 0 deletions google-beta/resource_data_loss_prevention_job_trigger_test.go
@@ -41,6 +41,31 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerUpdateExample(t *testing.T
})
}

func TestAccDataLossPreventionJobTrigger_dlpJobTriggerPubsub(t *testing.T) {
t.Parallel()

context := map[string]interface{}{
"project": getTestProjectFromEnv(),
}

vcrTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccDataLossPreventionJobTrigger_publishToPubSub(context),
},
{
ResourceName: "google_data_loss_prevention_job_trigger.pubsub",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"parent"},
},
},
})
}

func testAccDataLossPreventionJobTrigger_dlpJobTriggerBasic(context map[string]interface{}) string {
return Nprintf(`
resource "google_data_loss_prevention_job_trigger" "basic" {
@@ -114,3 +139,35 @@ resource "google_data_loss_prevention_job_trigger" "basic" {
}
`, context)
}

func testAccDataLossPreventionJobTrigger_publishToPubSub(context map[string]interface{}) string {
return Nprintf(`
resource "google_data_loss_prevention_job_trigger" "pubsub" {
parent = "projects/%{project}"
description = "Starting description"
display_name = "display"
triggers {
schedule {
recurrence_period_duration = "86400s"
}
}
inspect_job {
inspect_template_name = "fake"
actions {
pub_sub {
topic = "projects/%{project}/topics/bar"
}
}
storage_config {
cloud_storage_options {
file_set {
url = "gs://mybucket/directory/"
}
}
}
}
}
`, context)
}
13 changes: 12 additions & 1 deletion website/docs/r/data_loss_prevention_job_trigger.html.markdown
@@ -321,10 +321,15 @@ The following arguments are supported:
<a name="nested_actions"></a>The `actions` block supports:

* `save_findings` -
(Required)
(Optional)
Schedule for triggered jobs
Structure is [documented below](#nested_save_findings).

* `pub_sub` -
(Optional)
Publish a message into a given Pub/Sub topic when the job completes.
Structure is [documented below](#nested_pub_sub).


<a name="nested_save_findings"></a>The `save_findings` block supports:

@@ -368,6 +373,12 @@ The following arguments are supported:
Name of the table. If it is not set, a new one will be generated for you with the following format:
`dlp_googleapis_yyyy_mm_dd_[dlp_job_id]`. Pacific timezone will be used for generating the date details.

<a name="nested_pub_sub"></a>The `pub_sub` block supports:

* `topic` -
(Required)
Cloud Pub/Sub topic to send notifications to.
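
As a hedged illustration, the block nests inside `inspect_job.actions` as shown below; the topic name is a placeholder, not a value from this commit.

```hcl
# Illustrative fragment only; the topic name is a placeholder.
actions {
  pub_sub {
    topic = "projects/my-project/topics/dlp-notifications"
  }
}
```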

## Attributes Reference

In addition to the arguments listed above, the following computed attributes are exported:
