fix datastream_stream dataset id import (#7451)
ScottSuarez authored Mar 15, 2023
1 parent 01bde43 commit 7c55df9
Showing 5 changed files with 157 additions and 1 deletion.
5 changes: 4 additions & 1 deletion mmv1/products/datastream/Stream.yaml
@@ -716,7 +716,10 @@ properties:
name: 'datasetId'
required: true
description: |
Dataset ID in the format projects/{project}/datasets/{dataset_id}
Dataset ID in the format projects/{project}/datasets/{dataset_id} or
{project}:{dataset_id}
custom_expand: 'templates/terraform/custom_expand/datastream_stream_dataset_id.go.erb'
diff_suppress_func: resourceDatastreamStreamDatabaseIdDiffSuppress
- !ruby/object:Api::Type::NestedObject
name: 'sourceHierarchyDatasets'
exactly_one_of:
6 changes: 6 additions & 0 deletions mmv1/products/datastream/terraform.yaml
@@ -193,6 +193,12 @@ overrides: !ruby/object:Overrides::ResourceOverrides
stream_id: "my-stream"
source_connection_profile_id: "source-profile"
destination_connection_profile_id: "destination-profile"
- !ruby/object:Provider::Terraform::Examples
name: "datastream_stream_postgresql_bigquery_dataset_id"
primary_resource_id: "default"
pull_external: true
# Random provider
skip_vcr: true
- !ruby/object:Provider::Terraform::Examples
name: "datastream_stream_bigquery"
pull_external: true
15 changes: 15 additions & 0 deletions mmv1/templates/terraform/constants/datastream_stream.go.erb
@@ -63,4 +63,19 @@ func waitForDatastreamStreamReady(d *schema.ResourceData, config *Config, timeou
}
})
}

func resourceDatastreamStreamDatabaseIdDiffSuppress(_, old, new string, _ *schema.ResourceData) bool {
re := regexp.MustCompile(`projects/(.+)/datasets/([^\.\?\#]+)`)
paths := re.FindStringSubmatch(new)

// db returns value in form <project>:<dataset_id>
if len(paths) == 3 {
project := paths[1]
datasetId := paths[2]
new = fmt.Sprintf("%s:%s", project, datasetId)
}

return old == new
}

<% end -%>
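
For context only (not part of the commit): a minimal standalone sketch of the normalization the diff-suppress helper above performs. The package layout, the helper name normalizeDatasetId, and the sample project/dataset values are illustrative assumptions; only the regex and the rewrite to {project}:{dataset_id} come from the diff.

package main

import (
	"fmt"
	"regexp"
)

// normalizeDatasetId mirrors the rewrite in the commit: a self-link style ID
// (projects/{project}/datasets/{dataset_id}) is converted to the
// {project}:{dataset_id} form that the API reports back. (Illustrative helper,
// not the provider's actual function.)
func normalizeDatasetId(id string) string {
	re := regexp.MustCompile(`projects/(.+)/datasets/([^\.\?\#]+)`)
	if paths := re.FindStringSubmatch(id); len(paths) == 3 {
		return fmt.Sprintf("%s:%s", paths[1], paths[2])
	}
	return id
}

func main() {
	configured := "projects/my-project/datasets/postgres" // sample value as written in config
	stored := "my-project:postgres"                       // sample value as returned by the API

	// Both sides normalize to the same string, so no diff is reported.
	fmt.Println(normalizeDatasetId(configured) == stored) // true

	// An ID already in {project}:{dataset_id} form passes through unchanged.
	fmt.Println(normalizeDatasetId(stored)) // my-project:postgres
}
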
26 changes: 26 additions & 0 deletions mmv1/templates/terraform/custom_expand/datastream_stream_dataset_id.go.erb
@@ -0,0 +1,26 @@
<%# # the license inside this if block pertains to this file
# Copyright 2023 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#%>
func expand<%= prefix -%><%= titlelize_property(property) -%>(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
s := v.(string)
re := regexp.MustCompile(`projects/(.+)/datasets/([^\.\?\#]+)`)
paths := re.FindStringSubmatch(s)
if len(paths) == 3 {
project := paths[1]
datasetId := paths[2]
return fmt.Sprintf("%s:%s", project, datasetId), nil
}

return s, nil
}
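
Taken together, the expander and the diff-suppress function appear to handle the same mismatch from both directions: the expander rewrites a projects/{project}/datasets/{dataset_id} value into {project}:{dataset_id} before it is sent to the API, while the suppress function keeps a config written in the self-link form from producing a permanent diff against the {project}:{dataset_id} value the API returns (see the sketch above).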
106 changes: 106 additions & 0 deletions mmv1/templates/terraform/examples/datastream_stream_postgresql_bigquery_dataset_id.tf.erb
@@ -0,0 +1,106 @@

resource "google_bigquery_dataset" "postgres" {
dataset_id = "postgres%{random_suffix}"
friendly_name = "postgres"
description = "Database of postgres"
location = "us-central1"
}

resource "google_datastream_stream" "default" {
display_name = "postgres to bigQuery"
location = "us-central1"
stream_id = "postgres-to-big-query%{random_suffix}"

source_config {
source_connection_profile = google_datastream_connection_profile.source_connection_profile.id
mysql_source_config {}
}

destination_config {
destination_connection_profile = google_datastream_connection_profile.destination_connection_profile2.id
bigquery_destination_config {
data_freshness = "900s"
single_target_dataset {
dataset_id = google_bigquery_dataset.postgres.id
}
}
}

backfill_all {
}

}

resource "google_datastream_connection_profile" "destination_connection_profile2" {
display_name = "Connection profile"
location = "us-central1"
connection_profile_id = "tf-test-destination-profile%{random_suffix}"
bigquery_profile {}
}

resource "google_sql_database_instance" "instance" {
name = "tf-test-my-instance%{random_suffix}"
database_version = "MYSQL_8_0"
region = "us-central1"
settings {
tier = "db-f1-micro"
backup_configuration {
enabled = true
binary_log_enabled = true
}

ip_configuration {
// Datastream IPs will vary by region.
authorized_networks {
value = "34.71.242.81"
}

authorized_networks {
value = "34.72.28.29"
}

authorized_networks {
value = "34.67.6.157"
}

authorized_networks {
value = "34.67.234.134"
}

authorized_networks {
value = "34.72.239.218"
}
}
}

deletion_protection = false
}

resource "google_sql_database" "db" {
instance = google_sql_database_instance.instance.name
name = "db"
}

resource "random_password" "pwd" {
length = 16
special = false
}

resource "google_sql_user" "user" {
name = "user%{random_suffix}"
instance = google_sql_database_instance.instance.name
host = "%"
password = random_password.pwd.result
}

resource "google_datastream_connection_profile" "source_connection_profile" {
display_name = "Source connection profile"
location = "us-central1"
connection_profile_id = "tf-test-source-profile%{random_suffix}"

mysql_profile {
hostname = google_sql_database_instance.instance.public_ip_address
username = google_sql_user.user.name
password = google_sql_user.user.password
}
}
