Skip to content

Commit

Permalink
Switched biglake database to use parent field implementation (GoogleCloudPlatform#8790)
Browse files Browse the repository at this point in the history

* Switched biglake database to use parent field implementation

* Added custom import

* Switched to id_format / import_format

* Fixed biglake database test file path

* Fixed function names
melinath authored Aug 30, 2023
1 parent a9775a2 commit 958faf0
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 23 deletions.
21 changes: 8 additions & 13 deletions mmv1/products/biglake/Database.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -20,41 +20,36 @@ references: !ruby/object:Api::Resource::ReferenceLinks
guides:
"Manage open source metadata with BigLake Metastore": "https://cloud.google.com/bigquery/docs/manage-open-source-metadata#create_databases"
api: "https://cloud.google.com/bigquery/docs/reference/biglake/rest/v1/projects.locations.catalogs.databases"
base_url: "projects/{{project}}/locations/{{location}}/catalogs/{{catalog_id}}/databases"
self_link: "projects/{{project}}/locations/{{location}}/catalogs/{{catalog_id}}/databases/{{name}}"
create_url: "projects/{{project}}/locations/{{location}}/catalogs/{{catalog_id}}/databases?databaseId={{name}}"
base_url: "{{catalog}}/databases"
self_link: "{{catalog}}/databases/{{name}}"
create_url: "{{catalog}}/databases?databaseId={{name}}"
id_format: "{{catalog}}/databases/{{name}}"
import_format: ["{{%catalog}}/databases/{{name}}"]
update_mask: true
update_verb: :PATCH
examples:
- !ruby/object:Provider::Terraform::Examples
name: "bigquery_biglake_database"
name: "biglake_database"
primary_resource_id: "database"
vars:
name: "my_database"
catalog_id: "my_catalog"
bucket_id: "my_bucket"
parameters:
- !ruby/object:Api::Type::String
name: "location"
name: "catalog"
required: true
immutable: true
url_param_only: true
description: |
The geographic location where the Database should reside.
The parent catalog.
- !ruby/object:Api::Type::String
name: "name"
required: true
immutable: true
url_param_only: true
description: |
The name of the database.
- !ruby/object:Api::Type::String
name: "catalog_id"
required: true
immutable: true
url_param_only: true
description: |
The name of the parent catalog.
properties:
- !ruby/object:Api::Type::String
name: "createTime"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,7 @@ resource "google_storage_bucket_object" "metadata_folder" {

resource "google_biglake_database" "<%= ctx[:primary_resource_id] %>" {
name = "<%= ctx[:vars]['name'] %>"
catalog_id = google_biglake_catalog.catalog.name
location = google_biglake_catalog.catalog.location
catalog = google_biglake_catalog.catalog.id
type = "HIVE"
hive_options {
location_uri = "gs://${google_storage_bucket.bucket.name}/${google_storage_bucket_object.metadata_folder.name}"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import (
"github.com/hashicorp/terraform-provider-google/google/acctest"
)

func TestAccBiglakeDatabase_bigqueryBiglakeDatabase_update(t *testing.T) {
func TestAccBiglakeDatabase_biglakeDatabase_update(t *testing.T) {
t.Parallel()

context := map[string]interface{}{
Expand All @@ -21,28 +21,28 @@ func TestAccBiglakeDatabase_bigqueryBiglakeDatabase_update(t *testing.T) {
CheckDestroy: testAccCheckBiglakeDatabaseDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccBiglakeDatabase_bigqueryBiglakeDatabaseExample(context),
Config: testAccBiglakeDatabase_biglakeDatabaseExample(context),
},
{
ResourceName: "google_biglake_database.database",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"location", "name", "catalog_id"},
ImportStateVerifyIgnore: []string{"name", "catalog"},
},
{
Config: testAccBiglakeDatabase_bigqueryBiglakeDatabase_update(context),
Config: testAccBiglakeDatabase_biglakeDatabase_update(context),
},
{
ResourceName: "google_biglake_database.database",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"location", "name", "catalog_id"},
ImportStateVerifyIgnore: []string{"name", "catalog"},
},
},
})
}

func testAccBiglakeDatabase_bigqueryBiglakeDatabase_update(context map[string]interface{}) string {
func testAccBiglakeDatabase_biglakeDatabase_update(context map[string]interface{}) string {
return acctest.Nprintf(`
resource "google_biglake_catalog" "catalog" {
name = "tf_test_my_catalog%{random_suffix}"
Expand All @@ -62,8 +62,7 @@ resource "google_storage_bucket_object" "metadata_folder" {
}
resource "google_biglake_database" "database" {
name = "tf_test_my_database%{random_suffix}"
catalog_id = google_biglake_catalog.catalog.name
location = google_biglake_catalog.catalog.location
catalog = google_biglake_catalog.catalog.id
type = "HIVE"
hive_options {
location_uri = "gs://${google_storage_bucket.bucket.name}/${google_storage_bucket_object.metadata_folder.name}/metadata/metadata"
Expand Down

0 comments on commit 958faf0

Please sign in to comment.